modal 1.2.1.dev11__py3-none-any.whl → 1.2.1.dev13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of modal might be problematic. More details are available via the link in the original registry listing.

modal/client.pyi CHANGED
@@ -33,7 +33,7 @@ class _Client:
33
33
  server_url: str,
34
34
  client_type: int,
35
35
  credentials: typing.Optional[tuple[str, str]],
36
- version: str = "1.2.1.dev11",
36
+ version: str = "1.2.1.dev13",
37
37
  ):
38
38
  """mdmd:hidden
39
39
  The Modal client object is not intended to be instantiated directly by users.
@@ -164,7 +164,7 @@ class Client:
164
164
  server_url: str,
165
165
  client_type: int,
166
166
  credentials: typing.Optional[tuple[str, str]],
167
- version: str = "1.2.1.dev11",
167
+ version: str = "1.2.1.dev13",
168
168
  ):
169
169
  """mdmd:hidden
170
170
  The Modal client object is not intended to be instantiated directly by users.
@@ -155,8 +155,16 @@ class _ContainerProcess(Generic[T]):
155
155
  on_connect = asyncio.Event()
156
156
 
157
157
  async def _write_to_fd_loop(stream: _StreamReader):
158
+ # This is required to make modal shell to an existing task work,
159
+ # since that uses ContainerExec RPCs directly, but this is hacky.
160
+ #
161
+ # TODO(saltzm): Once we use the new exec path for that use case, this code can all be removed.
162
+ from .io_streams import _StreamReaderThroughServer
163
+
164
+ assert isinstance(stream._impl, _StreamReaderThroughServer)
165
+ stream_impl = stream._impl
158
166
  # Don't skip empty messages so we can detect when the process has booted.
159
- async for chunk in stream._get_logs(skip_empty_messages=False):
167
+ async for chunk in stream_impl._get_logs(skip_empty_messages=False):
160
168
  if chunk is None:
161
169
  break
162
170
 
modal/io_streams.py CHANGED
@@ -1,7 +1,9 @@
1
1
  # Copyright Modal Labs 2022
2
2
  import asyncio
3
+ import codecs
3
4
  import time
4
5
  from collections.abc import AsyncGenerator, AsyncIterator
6
+ from dataclasses import dataclass
5
7
  from typing import (
6
8
  TYPE_CHECKING,
7
9
  Generic,
@@ -15,11 +17,12 @@ from typing import (
15
17
  from grpclib import Status
16
18
  from grpclib.exceptions import GRPCError, StreamTerminatedError
17
19
 
18
- from modal.exception import ClientClosed, InvalidError
20
+ from modal.exception import ClientClosed, ExecTimeoutError, InvalidError
19
21
  from modal_proto import api_pb2
20
22
 
21
23
  from ._utils.async_utils import synchronize_api
22
24
  from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors
25
+ from ._utils.task_command_router_client import TaskCommandRouterClient
23
26
  from .client import _Client
24
27
  from .config import logger
25
28
  from .stream_type import StreamType
@@ -83,27 +86,8 @@ async def _container_process_logs_iterator(
83
86
  T = TypeVar("T", str, bytes)
84
87
 
85
88
 
86
- class _StreamReader(Generic[T]):
87
- """Retrieve logs from a stream (`stdout` or `stderr`).
88
-
89
- As an asynchronous iterable, the object supports the `for` and `async for`
90
- statements. Just loop over the object to read in chunks.
91
-
92
- **Usage**
93
-
94
- ```python fixture:running_app
95
- from modal import Sandbox
96
-
97
- sandbox = Sandbox.create(
98
- "bash",
99
- "-c",
100
- "for i in $(seq 1 10); do echo foo; sleep 0.1; done",
101
- app=running_app,
102
- )
103
- for message in sandbox.stdout:
104
- print(f"Message: {message}")
105
- ```
106
- """
89
+ class _StreamReaderThroughServer(Generic[T]):
90
+ """A StreamReader implementation that reads from the server."""
107
91
 
108
92
  _stream: Optional[AsyncGenerator[Optional[bytes], None]]
109
93
 
@@ -133,10 +117,6 @@ class _StreamReader(Generic[T]):
133
117
  if object_type == "sandbox" and not text:
134
118
  raise ValueError("Sandbox streams must have text mode enabled.")
135
119
 
136
- # line-buffering is only supported when text=True
137
- if by_line and not text:
138
- raise ValueError("line-buffering is only supported when text=True")
139
-
140
120
  self._text = text
141
121
  self._by_line = by_line
142
122
 
@@ -166,19 +146,7 @@ class _StreamReader(Generic[T]):
166
146
  return self._file_descriptor
167
147
 
168
148
  async def read(self) -> T:
169
- """Fetch the entire contents of the stream until EOF.
170
-
171
- **Usage**
172
-
173
- ```python fixture:running_app
174
- from modal import Sandbox
175
-
176
- sandbox = Sandbox.create("echo", "hello", app=running_app)
177
- sandbox.wait()
178
-
179
- print(sandbox.stdout.read())
180
- ```
181
- """
149
+ """Fetch the entire contents of the stream until EOF."""
182
150
  data_str = ""
183
151
  data_bytes = b""
184
152
  logger.debug(f"{self._object_id} StreamReader fd={self._file_descriptor} read starting")
@@ -330,11 +298,6 @@ class _StreamReader(Generic[T]):
330
298
  self._stream = self._get_logs()
331
299
  return self._stream
332
300
 
333
- def __aiter__(self) -> AsyncIterator[T]:
334
- """mdmd:hidden"""
335
- self._ensure_stream()
336
- return self
337
-
338
301
  async def __anext__(self) -> T:
339
302
  """mdmd:hidden"""
340
303
  stream = self._ensure_stream()
@@ -356,10 +319,293 @@ class _StreamReader(Generic[T]):
356
319
  await self._stream.aclose()
357
320
 
358
321
 
322
+ async def _decode_bytes_stream_to_str(stream: AsyncGenerator[bytes, None]) -> AsyncGenerator[str, None]:
323
+ """Incrementally decode a bytes async generator as UTF-8 without breaking on chunk boundaries.
324
+
325
+ This function uses a streaming UTF-8 decoder so that multi-byte characters split across
326
+ chunks are handled correctly instead of raising ``UnicodeDecodeError``.
327
+ """
328
+ decoder = codecs.getincrementaldecoder("utf-8")(errors="strict")
329
+ async for item in stream:
330
+ text = decoder.decode(item, final=False)
331
+ if text:
332
+ yield text
333
+ # Flush any buffered partial character at end-of-stream
334
+ tail = decoder.decode(b"", final=True)
335
+ if tail:
336
+ yield tail
337
+
338
+
339
+ async def _stream_by_line(stream: AsyncGenerator[bytes, None]) -> AsyncGenerator[bytes, None]:
340
+ """Yield complete lines only (ending with \n), buffering partial lines until complete."""
341
+ line_buffer = b""
342
+ async for message in stream:
343
+ assert isinstance(message, bytes)
344
+ line_buffer += message
345
+ while b"\n" in line_buffer:
346
+ line, line_buffer = line_buffer.split(b"\n", 1)
347
+ yield line + b"\n"
348
+
349
+ if line_buffer:
350
+ yield line_buffer
351
+
352
+
353
+ @dataclass
354
+ class _StreamReaderThroughCommandRouterParams:
355
+ file_descriptor: "api_pb2.FileDescriptor.ValueType"
356
+ task_id: str
357
+ object_id: str
358
+ command_router_client: TaskCommandRouterClient
359
+ deadline: Optional[float]
360
+
361
+
362
+ async def _stdio_stream_from_command_router(
363
+ params: _StreamReaderThroughCommandRouterParams,
364
+ ) -> AsyncGenerator[bytes, None]:
365
+ """Stream raw bytes from the router client."""
366
+ stream = params.command_router_client.exec_stdio_read(
367
+ params.task_id, params.object_id, params.file_descriptor, params.deadline
368
+ )
369
+ try:
370
+ async for item in stream:
371
+ if len(item.data) == 0:
372
+ # This is an error.
373
+ raise ValueError("Received empty message streaming stdio from sandbox.")
374
+
375
+ yield item.data
376
+ except ExecTimeoutError:
377
+ logger.debug(f"Deadline exceeded while streaming stdio for exec {params.object_id}")
378
+ # TODO(saltzm): This is a weird API, but customers currently may rely on it. We
379
+ # should probably raise this error rather than just ending the stream.
380
+ return
381
+
382
+
383
+ class _BytesStreamReaderThroughCommandRouter(Generic[T]):
384
+ """
385
+ StreamReader implementation that will read directly from the worker that
386
+ hosts the sandbox.
387
+
388
+ This implementation is used for non-text streams.
389
+ """
390
+
391
+ def __init__(
392
+ self,
393
+ params: _StreamReaderThroughCommandRouterParams,
394
+ ) -> None:
395
+ self._params = params
396
+ self._stream = None
397
+
398
+ @property
399
+ def file_descriptor(self) -> int:
400
+ return self._params.file_descriptor
401
+
402
+ async def read(self) -> T:
403
+ data_bytes = b""
404
+ async for part in self:
405
+ data_bytes += cast(bytes, part)
406
+ return cast(T, data_bytes)
407
+
408
+ def __aiter__(self) -> AsyncIterator[T]:
409
+ return self
410
+
411
+ async def __anext__(self) -> T:
412
+ if self._stream is None:
413
+ self._stream = _stdio_stream_from_command_router(self._params)
414
+ # This raises StopAsyncIteration if the stream is at EOF.
415
+ return cast(T, await self._stream.__anext__())
416
+
417
+ async def aclose(self):
418
+ if self._stream:
419
+ await self._stream.aclose()
420
+
421
+
422
+ class _TextStreamReaderThroughCommandRouter(Generic[T]):
423
+ """
424
+ StreamReader implementation that will read directly from the worker
425
+ that hosts the sandbox.
426
+
427
+ This implementation is used for text streams.
428
+ """
429
+
430
+ def __init__(
431
+ self,
432
+ params: _StreamReaderThroughCommandRouterParams,
433
+ by_line: bool,
434
+ ) -> None:
435
+ self._params = params
436
+ self._by_line = by_line
437
+ self._stream = None
438
+
439
+ @property
440
+ def file_descriptor(self) -> int:
441
+ return self._params.file_descriptor
442
+
443
+ async def read(self) -> T:
444
+ data_str = ""
445
+ async for part in self:
446
+ data_str += cast(str, part)
447
+ return cast(T, data_str)
448
+
449
+ def __aiter__(self) -> AsyncIterator[T]:
450
+ return self
451
+
452
+ async def __anext__(self) -> T:
453
+ if self._stream is None:
454
+ bytes_stream = _stdio_stream_from_command_router(self._params)
455
+ if self._by_line:
456
+ self._stream = _decode_bytes_stream_to_str(_stream_by_line(bytes_stream))
457
+ else:
458
+ self._stream = _decode_bytes_stream_to_str(bytes_stream)
459
+ # This raises StopAsyncIteration if the stream is at EOF.
460
+ return cast(T, await self._stream.__anext__())
461
+
462
+ async def aclose(self):
463
+ if self._stream:
464
+ await self._stream.aclose()
465
+
466
+
467
+ class _DevnullStreamReader(Generic[T]):
468
+ """StreamReader implementation for a stream configured with
469
+ StreamType.DEVNULL. Throws an error if read or any other method is
470
+ called.
471
+ """
472
+
473
+ def __init__(self, file_descriptor: "api_pb2.FileDescriptor.ValueType") -> None:
474
+ self._file_descriptor = file_descriptor
475
+
476
+ @property
477
+ def file_descriptor(self) -> int:
478
+ return self._file_descriptor
479
+
480
+ async def read(self) -> T:
481
+ raise ValueError("read is not supported for a stream configured with StreamType.DEVNULL")
482
+
483
+ def __aiter__(self) -> AsyncIterator[T]:
484
+ raise ValueError("__aiter__ is not supported for a stream configured with StreamType.DEVNULL")
485
+
486
+ async def __anext__(self) -> T:
487
+ raise ValueError("__anext__ is not supported for a stream configured with StreamType.DEVNULL")
488
+
489
+ async def aclose(self):
490
+ raise ValueError("aclose is not supported for a stream configured with StreamType.DEVNULL")
491
+
492
+
493
+ class _StreamReader(Generic[T]):
494
+ """Retrieve logs from a stream (`stdout` or `stderr`).
495
+
496
+ As an asynchronous iterable, the object supports the `for` and `async for`
497
+ statements. Just loop over the object to read in chunks.
498
+
499
+ **Usage**
500
+
501
+ ```python fixture:running_app
502
+ from modal import Sandbox
503
+
504
+ sandbox = Sandbox.create(
505
+ "bash",
506
+ "-c",
507
+ "for i in $(seq 1 10); do echo foo; sleep 0.1; done",
508
+ app=running_app,
509
+ )
510
+ for message in sandbox.stdout:
511
+ print(f"Message: {message}")
512
+ ```
513
+ """
514
+
515
+ def __init__(
516
+ self,
517
+ file_descriptor: "api_pb2.FileDescriptor.ValueType",
518
+ object_id: str,
519
+ object_type: Literal["sandbox", "container_process"],
520
+ client: _Client,
521
+ stream_type: StreamType = StreamType.PIPE,
522
+ text: bool = True,
523
+ by_line: bool = False,
524
+ deadline: Optional[float] = None,
525
+ command_router_client: Optional[TaskCommandRouterClient] = None,
526
+ task_id: Optional[str] = None,
527
+ ) -> None:
528
+ """mdmd:hidden"""
529
+ if by_line and not text:
530
+ raise ValueError("line-buffering is only supported when text=True")
531
+
532
+ if command_router_client is None:
533
+ self._impl = _StreamReaderThroughServer(
534
+ file_descriptor, object_id, object_type, client, stream_type, text, by_line, deadline
535
+ )
536
+ else:
537
+ # The only reason task_id is optional is because StreamReader is
538
+ # also used for sandbox logs, which don't have a task ID available
539
+ # when the StreamReader is created.
540
+ assert task_id is not None
541
+ assert object_type == "container_process"
542
+ if stream_type == StreamType.DEVNULL:
543
+ self._impl = _DevnullStreamReader(file_descriptor)
544
+ else:
545
+ assert stream_type == StreamType.PIPE or stream_type == StreamType.STDOUT
546
+ # TODO(saltzm): The original implementation of STDOUT StreamType in
547
+ # _StreamReaderThroughServer prints to stdout immediately. This doesn't match
548
+ # python subprocess.run, which uses None to print to stdout immediately, and uses
549
+ # STDOUT as an argument to stderr to redirect stderr to the stdout stream. We should
550
+ # implement the old behavior here before moving out of beta, but after that
551
+ # we should consider changing the API to match python subprocess.run. I don't expect
552
+ # many customers are using this in any case, so I think it's fine to leave this
553
+ # unimplemented for now.
554
+ if stream_type == StreamType.STDOUT:
555
+ raise NotImplementedError(
556
+ "Currently only the PIPE stream type is supported when using exec "
557
+ "through a task command router, which is currently in beta."
558
+ )
559
+ params = _StreamReaderThroughCommandRouterParams(
560
+ file_descriptor, task_id, object_id, command_router_client, deadline
561
+ )
562
+ if text:
563
+ self._impl = _TextStreamReaderThroughCommandRouter(params, by_line)
564
+ else:
565
+ self._impl = _BytesStreamReaderThroughCommandRouter(params)
566
+
567
+ @property
568
+ def file_descriptor(self) -> int:
569
+ """Possible values are `1` for stdout and `2` for stderr."""
570
+ return self._impl.file_descriptor
571
+
572
+ async def read(self) -> T:
573
+ """Fetch the entire contents of the stream until EOF.
574
+
575
+ **Usage**
576
+
577
+ ```python fixture:running_app
578
+ from modal import Sandbox
579
+
580
+ sandbox = Sandbox.create("echo", "hello", app=running_app)
581
+ sandbox.wait()
582
+
583
+ print(sandbox.stdout.read())
584
+ ```
585
+ """
586
+ return await self._impl.read()
587
+
588
+ # TODO(saltzm): I'd prefer to have the implementation classes only implement __aiter__
589
+ # and have them return generator functions directly, but synchronicity doesn't let us
590
+ # return self._impl.__aiter__() here because it won't properly wrap the implementation
591
+ # classes.
592
+ def __aiter__(self) -> AsyncIterator[T]:
593
+ """mdmd:hidden"""
594
+ return self
595
+
596
+ async def __anext__(self) -> T:
597
+ """mdmd:hidden"""
598
+ return await self._impl.__anext__()
599
+
600
+ async def aclose(self):
601
+ """mdmd:hidden"""
602
+ await self._impl.aclose()
603
+
604
+
359
605
  MAX_BUFFER_SIZE = 2 * 1024 * 1024
360
606
 
361
607
 
362
- class _StreamWriter:
608
+ class _StreamWriterThroughServer:
363
609
  """Provides an interface to buffer and write logs to a sandbox or container process stream (`stdin`)."""
364
610
 
365
611
  def __init__(self, object_id: str, object_type: Literal["sandbox", "container_process"], client: _Client) -> None:
@@ -381,25 +627,6 @@ class _StreamWriter:
381
627
 
382
628
  This is non-blocking and queues the data to an internal buffer. Must be
383
629
  used along with the `drain()` method, which flushes the buffer.
384
-
385
- **Usage**
386
-
387
- ```python fixture:running_app
388
- from modal import Sandbox
389
-
390
- sandbox = Sandbox.create(
391
- "bash",
392
- "-c",
393
- "while read line; do echo $line; done",
394
- app=running_app,
395
- )
396
- sandbox.stdin.write(b"foo\\n")
397
- sandbox.stdin.write(b"bar\\n")
398
- sandbox.stdin.write_eof()
399
-
400
- sandbox.stdin.drain()
401
- sandbox.wait()
402
- ```
403
630
  """
404
631
  if self._is_closed:
405
632
  raise ValueError("Stdin is closed. Cannot write to it.")
@@ -407,7 +634,7 @@ class _StreamWriter:
407
634
  if isinstance(data, str):
408
635
  data = data.encode("utf-8")
409
636
  if len(self._buffer) + len(data) > MAX_BUFFER_SIZE:
410
- raise BufferError("Buffer size exceed limit. Call drain to clear the buffer.")
637
+ raise BufferError("Buffer size exceed limit. Call drain to flush the buffer.")
411
638
  self._buffer.extend(data)
412
639
  else:
413
640
  raise TypeError(f"data argument must be a bytes-like object, not {type(data).__name__}")
@@ -426,19 +653,6 @@ class _StreamWriter:
426
653
 
427
654
  This is a flow control method that blocks until data is sent. It returns
428
655
  when it is appropriate to continue writing data to the stream.
429
-
430
- **Usage**
431
-
432
- ```python notest
433
- writer.write(data)
434
- writer.drain()
435
- ```
436
-
437
- Async usage:
438
- ```python notest
439
- writer.write(data) # not a blocking operation
440
- await writer.drain.aio()
441
- ```
442
656
  """
443
657
  data = bytes(self._buffer)
444
658
  self._buffer.clear()
@@ -467,5 +681,127 @@ class _StreamWriter:
467
681
  raise exc
468
682
 
469
683
 
684
+ class _StreamWriterThroughCommandRouter:
685
+ def __init__(
686
+ self,
687
+ object_id: str,
688
+ command_router_client: TaskCommandRouterClient,
689
+ task_id: str,
690
+ ) -> None:
691
+ self._object_id = object_id
692
+ self._command_router_client = command_router_client
693
+ self._task_id = task_id
694
+ self._is_closed = False
695
+ self._buffer = bytearray()
696
+ self._offset = 0
697
+
698
+ def write(self, data: Union[bytes, bytearray, memoryview, str]) -> None:
699
+ if self._is_closed:
700
+ raise ValueError("Stdin is closed. Cannot write to it.")
701
+ if isinstance(data, (bytes, bytearray, memoryview, str)):
702
+ if isinstance(data, str):
703
+ data = data.encode("utf-8")
704
+ if len(self._buffer) + len(data) > MAX_BUFFER_SIZE:
705
+ raise BufferError("Buffer size exceed limit. Call drain to flush the buffer.")
706
+ self._buffer.extend(data)
707
+ else:
708
+ raise TypeError(f"data argument must be a bytes-like object, not {type(data).__name__}")
709
+
710
+ def write_eof(self) -> None:
711
+ self._is_closed = True
712
+
713
+ async def drain(self) -> None:
714
+ eof = self._is_closed
715
+ # NB: There's no need to prevent writing eof twice, because the command router will ignore the second EOF.
716
+ if self._buffer or eof:
717
+ data = bytes(self._buffer)
718
+ await self._command_router_client.exec_stdin_write(
719
+ task_id=self._task_id, exec_id=self._object_id, offset=self._offset, data=data, eof=eof
720
+ )
721
+ # Only clear the buffer after writing the data to the command router is successful.
722
+ # This allows the client to retry drain() in the event of an exception (though
723
+ # exec_stdin_write already retries on transient errors, so most users will probably
724
+ # not do this).
725
+ self._buffer.clear()
726
+ self._offset += len(data)
727
+
728
+
729
+ class _StreamWriter:
730
+ """Provides an interface to buffer and write logs to a sandbox or container process stream (`stdin`)."""
731
+
732
+ def __init__(
733
+ self,
734
+ object_id: str,
735
+ object_type: Literal["sandbox", "container_process"],
736
+ client: _Client,
737
+ command_router_client: Optional[TaskCommandRouterClient] = None,
738
+ task_id: Optional[str] = None,
739
+ ) -> None:
740
+ """mdmd:hidden"""
741
+ if command_router_client is None:
742
+ self._impl = _StreamWriterThroughServer(object_id, object_type, client)
743
+ else:
744
+ assert task_id is not None
745
+ assert object_type == "container_process"
746
+ self._impl = _StreamWriterThroughCommandRouter(object_id, command_router_client, task_id=task_id)
747
+
748
+ def write(self, data: Union[bytes, bytearray, memoryview, str]) -> None:
749
+ """Write data to the stream but does not send it immediately.
750
+
751
+ This is non-blocking and queues the data to an internal buffer. Must be
752
+ used along with the `drain()` method, which flushes the buffer.
753
+
754
+ **Usage**
755
+
756
+ ```python fixture:running_app
757
+ from modal import Sandbox
758
+
759
+ sandbox = Sandbox.create(
760
+ "bash",
761
+ "-c",
762
+ "while read line; do echo $line; done",
763
+ app=running_app,
764
+ )
765
+ sandbox.stdin.write(b"foo\\n")
766
+ sandbox.stdin.write(b"bar\\n")
767
+ sandbox.stdin.write_eof()
768
+
769
+ sandbox.stdin.drain()
770
+ sandbox.wait()
771
+ ```
772
+ """
773
+ self._impl.write(data)
774
+
775
+ def write_eof(self) -> None:
776
+ """Close the write end of the stream after the buffered data is drained.
777
+
778
+ If the process was blocked on input, it will become unblocked after
779
+ `write_eof()`. This method needs to be used along with the `drain()`
780
+ method, which flushes the EOF to the process.
781
+ """
782
+ self._impl.write_eof()
783
+
784
+ async def drain(self) -> None:
785
+ """Flush the write buffer and send data to the running process.
786
+
787
+ This is a flow control method that blocks until data is sent. It returns
788
+ when it is appropriate to continue writing data to the stream.
789
+
790
+ **Usage**
791
+
792
+ ```python notest
793
+ writer.write(data)
794
+ writer.drain()
795
+ ```
796
+
797
+ Async usage:
798
+ ```python notest
799
+ writer.write(data) # not a blocking operation
800
+ await writer.drain.aio()
801
+ ```
802
+ """
803
+ await self._impl.drain()
804
+
805
+
470
806
  StreamReader = synchronize_api(_StreamReader)
471
807
  StreamWriter = synchronize_api(_StreamWriter)
modal/io_streams.pyi CHANGED
@@ -1,4 +1,5 @@
1
1
  import collections.abc
2
+ import modal._utils.task_command_router_client
2
3
  import modal.client
3
4
  import modal.stream_type
4
5
  import typing
@@ -17,6 +18,167 @@ def _container_process_logs_iterator(
17
18
 
18
19
  T = typing.TypeVar("T")
19
20
 
21
+ class _StreamReaderThroughServer(typing.Generic[T]):
22
+ """A StreamReader implementation that reads from the server."""
23
+
24
+ _stream: typing.Optional[collections.abc.AsyncGenerator[typing.Optional[bytes], None]]
25
+
26
+ def __init__(
27
+ self,
28
+ file_descriptor: int,
29
+ object_id: str,
30
+ object_type: typing.Literal["sandbox", "container_process"],
31
+ client: modal.client._Client,
32
+ stream_type: modal.stream_type.StreamType = modal.stream_type.StreamType.PIPE,
33
+ text: bool = True,
34
+ by_line: bool = False,
35
+ deadline: typing.Optional[float] = None,
36
+ ) -> None:
37
+ """mdmd:hidden"""
38
+ ...
39
+
40
+ @property
41
+ def file_descriptor(self) -> int:
42
+ """Possible values are `1` for stdout and `2` for stderr."""
43
+ ...
44
+
45
+ async def read(self) -> T:
46
+ """Fetch the entire contents of the stream until EOF."""
47
+ ...
48
+
49
+ async def _consume_container_process_stream(self):
50
+ """Consume the container process stream and store messages in the buffer."""
51
+ ...
52
+
53
+ def _stream_container_process(self) -> collections.abc.AsyncGenerator[tuple[typing.Optional[bytes], str], None]:
54
+ """Streams the container process buffer to the reader."""
55
+ ...
56
+
57
+ def _get_logs(
58
+ self, skip_empty_messages: bool = True
59
+ ) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]:
60
+ """Streams sandbox or process logs from the server to the reader.
61
+
62
+ Logs returned by this method may contain partial or multiple lines at a time.
63
+
64
+ When the stream receives an EOF, it yields None. Once an EOF is received,
65
+ subsequent invocations will not yield logs.
66
+ """
67
+ ...
68
+
69
+ def _get_logs_by_line(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]:
70
+ """Process logs from the server and yield complete lines only."""
71
+ ...
72
+
73
+ def _ensure_stream(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
74
+ async def __anext__(self) -> T:
75
+ """mdmd:hidden"""
76
+ ...
77
+
78
+ async def aclose(self):
79
+ """mdmd:hidden"""
80
+ ...
81
+
82
+ def _decode_bytes_stream_to_str(
83
+ stream: collections.abc.AsyncGenerator[bytes, None],
84
+ ) -> collections.abc.AsyncGenerator[str, None]:
85
+ """Incrementally decode a bytes async generator as UTF-8 without breaking on chunk boundaries.
86
+
87
+ This function uses a streaming UTF-8 decoder so that multi-byte characters split across
88
+ chunks are handled correctly instead of raising ``UnicodeDecodeError``.
89
+ """
90
+ ...
91
+
92
+ def _stream_by_line(stream: collections.abc.AsyncGenerator[bytes, None]) -> collections.abc.AsyncGenerator[bytes, None]:
93
+ """Yield complete lines only (ending with
94
+ ), buffering partial lines until complete.
95
+ """
96
+ ...
97
+
98
+ class _StreamReaderThroughCommandRouterParams:
99
+ """_StreamReaderThroughCommandRouterParams(file_descriptor: 'api_pb2.FileDescriptor.ValueType', task_id: str, object_id: str, command_router_client: modal._utils.task_command_router_client.TaskCommandRouterClient, deadline: Optional[float])"""
100
+
101
+ file_descriptor: int
102
+ task_id: str
103
+ object_id: str
104
+ command_router_client: modal._utils.task_command_router_client.TaskCommandRouterClient
105
+ deadline: typing.Optional[float]
106
+
107
+ def __init__(
108
+ self,
109
+ file_descriptor: int,
110
+ task_id: str,
111
+ object_id: str,
112
+ command_router_client: modal._utils.task_command_router_client.TaskCommandRouterClient,
113
+ deadline: typing.Optional[float],
114
+ ) -> None:
115
+ """Initialize self. See help(type(self)) for accurate signature."""
116
+ ...
117
+
118
+ def __repr__(self):
119
+ """Return repr(self)."""
120
+ ...
121
+
122
+ def __eq__(self, other):
123
+ """Return self==value."""
124
+ ...
125
+
126
+ def _stdio_stream_from_command_router(
127
+ params: _StreamReaderThroughCommandRouterParams,
128
+ ) -> collections.abc.AsyncGenerator[bytes, None]:
129
+ """Stream raw bytes from the router client."""
130
+ ...
131
+
132
+ class _BytesStreamReaderThroughCommandRouter(typing.Generic[T]):
133
+ """StreamReader implementation that will read directly from the worker that
134
+ hosts the sandbox.
135
+
136
+ This implementation is used for non-text streams.
137
+ """
138
+ def __init__(self, params: _StreamReaderThroughCommandRouterParams) -> None:
139
+ """Initialize self. See help(type(self)) for accurate signature."""
140
+ ...
141
+
142
+ @property
143
+ def file_descriptor(self) -> int: ...
144
+ async def read(self) -> T: ...
145
+ def __aiter__(self) -> collections.abc.AsyncIterator[T]: ...
146
+ async def __anext__(self) -> T: ...
147
+ async def aclose(self): ...
148
+
149
+ class _TextStreamReaderThroughCommandRouter(typing.Generic[T]):
150
+ """StreamReader implementation that will read directly from the worker
151
+ that hosts the sandbox.
152
+
153
+ This implementation is used for text streams.
154
+ """
155
+ def __init__(self, params: _StreamReaderThroughCommandRouterParams, by_line: bool) -> None:
156
+ """Initialize self. See help(type(self)) for accurate signature."""
157
+ ...
158
+
159
+ @property
160
+ def file_descriptor(self) -> int: ...
161
+ async def read(self) -> T: ...
162
+ def __aiter__(self) -> collections.abc.AsyncIterator[T]: ...
163
+ async def __anext__(self) -> T: ...
164
+ async def aclose(self): ...
165
+
166
+ class _DevnullStreamReader(typing.Generic[T]):
167
+ """StreamReader implementation for a stream configured with
168
+ StreamType.DEVNULL. Throws an error if read or any other method is
169
+ called.
170
+ """
171
+ def __init__(self, file_descriptor: int) -> None:
172
+ """Initialize self. See help(type(self)) for accurate signature."""
173
+ ...
174
+
175
+ @property
176
+ def file_descriptor(self) -> int: ...
177
+ async def read(self) -> T: ...
178
+ def __aiter__(self) -> collections.abc.AsyncIterator[T]: ...
179
+ async def __anext__(self) -> T: ...
180
+ async def aclose(self): ...
181
+
20
182
  class _StreamReader(typing.Generic[T]):
21
183
  """Retrieve logs from a stream (`stdout` or `stderr`).
22
184
 
@@ -38,9 +200,6 @@ class _StreamReader(typing.Generic[T]):
38
200
  print(f"Message: {message}")
39
201
  ```
40
202
  """
41
-
42
- _stream: typing.Optional[collections.abc.AsyncGenerator[typing.Optional[bytes], None]]
43
-
44
203
  def __init__(
45
204
  self,
46
205
  file_descriptor: int,
@@ -51,6 +210,8 @@ class _StreamReader(typing.Generic[T]):
51
210
  text: bool = True,
52
211
  by_line: bool = False,
53
212
  deadline: typing.Optional[float] = None,
213
+ command_router_client: typing.Optional[modal._utils.task_command_router_client.TaskCommandRouterClient] = None,
214
+ task_id: typing.Optional[str] = None,
54
215
  ) -> None:
55
216
  """mdmd:hidden"""
56
217
  ...
@@ -76,52 +237,79 @@ class _StreamReader(typing.Generic[T]):
76
237
  """
77
238
  ...
78
239
 
79
- async def _consume_container_process_stream(self):
80
- """Consume the container process stream and store messages in the buffer."""
240
+ def __aiter__(self) -> collections.abc.AsyncIterator[T]:
241
+ """mdmd:hidden"""
81
242
  ...
82
243
 
83
- def _stream_container_process(self) -> collections.abc.AsyncGenerator[tuple[typing.Optional[bytes], str], None]:
84
- """Streams the container process buffer to the reader."""
244
+ async def __anext__(self) -> T:
245
+ """mdmd:hidden"""
85
246
  ...
86
247
 
87
- def _get_logs(
88
- self, skip_empty_messages: bool = True
89
- ) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]:
90
- """Streams sandbox or process logs from the server to the reader.
248
+ async def aclose(self):
249
+ """mdmd:hidden"""
250
+ ...
91
251
 
92
- Logs returned by this method may contain partial or multiple lines at a time.
252
+ class _StreamWriterThroughServer:
253
+ """Provides an interface to buffer and write logs to a sandbox or container process stream (`stdin`)."""
254
+ def __init__(
255
+ self, object_id: str, object_type: typing.Literal["sandbox", "container_process"], client: modal.client._Client
256
+ ) -> None:
257
+ """mdmd:hidden"""
258
+ ...
93
259
 
94
- When the stream receives an EOF, it yields None. Once an EOF is received,
95
- subsequent invocations will not yield logs.
260
+ def _get_next_index(self) -> int: ...
261
+ def write(self, data: typing.Union[bytes, bytearray, memoryview, str]) -> None:
262
+ """Write data to the stream but does not send it immediately.
263
+
264
+ This is non-blocking and queues the data to an internal buffer. Must be
265
+ used along with the `drain()` method, which flushes the buffer.
96
266
  """
97
267
  ...
98
268
 
99
- def _get_logs_by_line(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]:
100
- """Process logs from the server and yield complete lines only."""
101
- ...
269
+ def write_eof(self) -> None:
270
+ """Close the write end of the stream after the buffered data is drained.
102
271
 
103
- def _ensure_stream(self) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
104
- def __aiter__(self) -> collections.abc.AsyncIterator[T]:
105
- """mdmd:hidden"""
272
+ If the process was blocked on input, it will become unblocked after
273
+ `write_eof()`. This method needs to be used along with the `drain()`
274
+ method, which flushes the EOF to the process.
275
+ """
106
276
  ...
107
277
 
108
- async def __anext__(self) -> T:
109
- """mdmd:hidden"""
278
+ async def drain(self) -> None:
279
+ """Flush the write buffer and send data to the running process.
280
+
281
+ This is a flow control method that blocks until data is sent. It returns
282
+ when it is appropriate to continue writing data to the stream.
283
+ """
110
284
  ...
111
285
 
112
- async def aclose(self):
113
- """mdmd:hidden"""
286
+ class _StreamWriterThroughCommandRouter:
287
+ def __init__(
288
+ self,
289
+ object_id: str,
290
+ command_router_client: modal._utils.task_command_router_client.TaskCommandRouterClient,
291
+ task_id: str,
292
+ ) -> None:
293
+ """Initialize self. See help(type(self)) for accurate signature."""
114
294
  ...
115
295
 
296
+ def write(self, data: typing.Union[bytes, bytearray, memoryview, str]) -> None: ...
297
+ def write_eof(self) -> None: ...
298
+ async def drain(self) -> None: ...
299
+
116
300
  class _StreamWriter:
117
301
  """Provides an interface to buffer and write logs to a sandbox or container process stream (`stdin`)."""
118
302
  def __init__(
119
- self, object_id: str, object_type: typing.Literal["sandbox", "container_process"], client: modal.client._Client
303
+ self,
304
+ object_id: str,
305
+ object_type: typing.Literal["sandbox", "container_process"],
306
+ client: modal.client._Client,
307
+ command_router_client: typing.Optional[modal._utils.task_command_router_client.TaskCommandRouterClient] = None,
308
+ task_id: typing.Optional[str] = None,
120
309
  ) -> None:
121
310
  """mdmd:hidden"""
122
311
  ...
123
312
 
124
- def _get_next_index(self) -> int: ...
125
313
  def write(self, data: typing.Union[bytes, bytearray, memoryview, str]) -> None:
126
314
  """Write data to the stream but does not send it immediately.
127
315
 
@@ -204,9 +392,6 @@ class StreamReader(typing.Generic[T]):
204
392
  print(f"Message: {message}")
205
393
  ```
206
394
  """
207
-
208
- _stream: typing.Optional[collections.abc.AsyncGenerator[typing.Optional[bytes], None]]
209
-
210
395
  def __init__(
211
396
  self,
212
397
  file_descriptor: int,
@@ -217,6 +402,8 @@ class StreamReader(typing.Generic[T]):
217
402
  text: bool = True,
218
403
  by_line: bool = False,
219
404
  deadline: typing.Optional[float] = None,
405
+ command_router_client: typing.Optional[modal._utils.task_command_router_client.TaskCommandRouterClient] = None,
406
+ task_id: typing.Optional[str] = None,
220
407
  ) -> None:
221
408
  """mdmd:hidden"""
222
409
  ...
@@ -261,70 +448,6 @@ class StreamReader(typing.Generic[T]):
261
448
 
262
449
  read: __read_spec[T, typing_extensions.Self]
263
450
 
264
- class ___consume_container_process_stream_spec(typing_extensions.Protocol[SUPERSELF]):
265
- def __call__(self, /):
266
- """Consume the container process stream and store messages in the buffer."""
267
- ...
268
-
269
- async def aio(self, /):
270
- """Consume the container process stream and store messages in the buffer."""
271
- ...
272
-
273
- _consume_container_process_stream: ___consume_container_process_stream_spec[typing_extensions.Self]
274
-
275
- class ___stream_container_process_spec(typing_extensions.Protocol[SUPERSELF]):
276
- def __call__(self, /) -> typing.Generator[tuple[typing.Optional[bytes], str], None, None]:
277
- """Streams the container process buffer to the reader."""
278
- ...
279
-
280
- def aio(self, /) -> collections.abc.AsyncGenerator[tuple[typing.Optional[bytes], str], None]:
281
- """Streams the container process buffer to the reader."""
282
- ...
283
-
284
- _stream_container_process: ___stream_container_process_spec[typing_extensions.Self]
285
-
286
- class ___get_logs_spec(typing_extensions.Protocol[SUPERSELF]):
287
- def __call__(self, /, skip_empty_messages: bool = True) -> typing.Generator[typing.Optional[bytes], None, None]:
288
- """Streams sandbox or process logs from the server to the reader.
289
-
290
- Logs returned by this method may contain partial or multiple lines at a time.
291
-
292
- When the stream receives an EOF, it yields None. Once an EOF is received,
293
- subsequent invocations will not yield logs.
294
- """
295
- ...
296
-
297
- def aio(
298
- self, /, skip_empty_messages: bool = True
299
- ) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]:
300
- """Streams sandbox or process logs from the server to the reader.
301
-
302
- Logs returned by this method may contain partial or multiple lines at a time.
303
-
304
- When the stream receives an EOF, it yields None. Once an EOF is received,
305
- subsequent invocations will not yield logs.
306
- """
307
- ...
308
-
309
- _get_logs: ___get_logs_spec[typing_extensions.Self]
310
-
311
- class ___get_logs_by_line_spec(typing_extensions.Protocol[SUPERSELF]):
312
- def __call__(self, /) -> typing.Generator[typing.Optional[bytes], None, None]:
313
- """Process logs from the server and yield complete lines only."""
314
- ...
315
-
316
- def aio(self, /) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]:
317
- """Process logs from the server and yield complete lines only."""
318
- ...
319
-
320
- _get_logs_by_line: ___get_logs_by_line_spec[typing_extensions.Self]
321
-
322
- class ___ensure_stream_spec(typing_extensions.Protocol[SUPERSELF]):
323
- def __call__(self, /) -> typing.Generator[typing.Optional[bytes], None, None]: ...
324
- def aio(self, /) -> collections.abc.AsyncGenerator[typing.Optional[bytes], None]: ...
325
-
326
- _ensure_stream: ___ensure_stream_spec[typing_extensions.Self]
327
-
328
451
  def __iter__(self) -> typing.Iterator[T]:
329
452
  """mdmd:hidden"""
330
453
  ...
@@ -352,12 +475,16 @@ class StreamReader(typing.Generic[T]):
352
475
  class StreamWriter:
353
476
  """Provides an interface to buffer and write logs to a sandbox or container process stream (`stdin`)."""
354
477
  def __init__(
355
- self, object_id: str, object_type: typing.Literal["sandbox", "container_process"], client: modal.client.Client
478
+ self,
479
+ object_id: str,
480
+ object_type: typing.Literal["sandbox", "container_process"],
481
+ client: modal.client.Client,
482
+ command_router_client: typing.Optional[modal._utils.task_command_router_client.TaskCommandRouterClient] = None,
483
+ task_id: typing.Optional[str] = None,
356
484
  ) -> None:
357
485
  """mdmd:hidden"""
358
486
  ...
359
487
 
360
- def _get_next_index(self) -> int: ...
361
488
  def write(self, data: typing.Union[bytes, bytearray, memoryview, str]) -> None:
362
489
  """Write data to the stream but does not send it immediately.
363
490
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: modal
3
- Version: 1.2.1.dev11
3
+ Version: 1.2.1.dev13
4
4
  Summary: Python client library for Modal
5
5
  Author-email: Modal Labs <support@modal.com>
6
6
  License: Apache-2.0
@@ -24,13 +24,13 @@ modal/app.pyi,sha256=AUV5Rp8qQrZJTP2waoKHFY7rYgsXNMYibMcCAQKuSeo,50544
24
24
  modal/billing.py,sha256=zmQ3bcCJlwa4KD1IA_QgdWpm1pn13c-7qfy79iEauYI,195
25
25
  modal/call_graph.py,sha256=1g2DGcMIJvRy-xKicuf63IVE98gJSnQsr8R_NVMptNc,2581
26
26
  modal/client.py,sha256=kyAIVB3Ay-XKJizQ_1ufUFB__EagV0MLmHJpyYyJ7J0,18636
27
- modal/client.pyi,sha256=ph8_OsWEq_LudTDG7jrAv84RKbPE2JfPw8SiCFAXLf8,15831
27
+ modal/client.pyi,sha256=ly6S74QtNMbeJ0kqCRdVsoppgC7cgA_gFuw9Zd6H0UI,15831
28
28
  modal/cloud_bucket_mount.py,sha256=I2GRXYhOWLIz2kJZjXu75jAm9EJkBNcutGc6jR2ReUw,5928
29
29
  modal/cloud_bucket_mount.pyi,sha256=VuUOipMIHqFXMkD-3g2bsoqpSxV5qswlFHDOqPQzYAo,7405
30
30
  modal/cls.py,sha256=ZxzivE3fNci4-A5uyBYNAzXMXtdqDg3gnYvgbdy5fhg,40384
31
31
  modal/cls.pyi,sha256=jJsDPFoqzM4ht-V-e-xEJKJ5TINLF0fYtoBm_UeAW5Y,27281
32
32
  modal/config.py,sha256=hpgkgQKbjzo6gVbRzXQrky72_KpdSEm65RNi1M2iNjc,13038
33
- modal/container_process.py,sha256=Mutkl7sg_WR5zP4oJiWSC-3UdYRqp0zdKi1shZbi-bk,6996
33
+ modal/container_process.py,sha256=DnqlgHiM-7rgVdJNcaXyZlXFD6DFLxEgMSFkieQ6Oj0,7452
34
34
  modal/container_process.pyi,sha256=9m-st3hCUlNN1GOTctfPPvIvoLtEl7FbuGWwif5-7YU,6037
35
35
  modal/dict.py,sha256=XkaxuojMVtcc4bZvCjJcd6DedU5xxfF8H4w-mDzFPCo,21580
36
36
  modal/dict.pyi,sha256=deOiwuwZtwXqedC3h19SwoQIWc4mUnDTBM5XkONt48Y,31712
@@ -45,8 +45,8 @@ modal/functions.pyi,sha256=Z6VuukLrjASAgf0kV9I6c09WvP_b2gCujX6f9j2bBaw,37988
45
45
  modal/gpu.py,sha256=Fe5ORvVPDIstSq1xjmM6OoNgLYFWvogP9r5BgmD3hYg,6769
46
46
  modal/image.py,sha256=HDkOnhIAN8g63a8LTN4J5SjC9ciReFQQJIxTS2z5KFM,107216
47
47
  modal/image.pyi,sha256=dMvMwAuvWkNN2BRYJFijkEy2m_xtEXgCKK0T7FVldsc,77514
48
- modal/io_streams.py,sha256=hZOVc5beOAm8S_VQQmmKUbk_BJ9OltN83RY0yMPqUDo,16545
49
- modal/io_streams.pyi,sha256=aOun_jUFKHSJyUY6-7gKvNoxzcULsa8_hxdtEO7v-gk,13980
48
+ modal/io_streams.py,sha256=Kv-No6WcNBouwdoogwHafOsmPOKqxTpvVGLU0mM6xMc,29564
49
+ modal/io_streams.pyi,sha256=h7qtAbj8LsN-eJKAGjBhnMBegvWprc_0AmwVFi6rj2Y,18084
50
50
  modal/mount.py,sha256=G7_xhQMZqokgfsaFLMch0YR3fs-OUNqYUm3f4jHTSMQ,33161
51
51
  modal/mount.pyi,sha256=MD_zV2M7eCWxbOpQRjU60aHevN-bmbiywaCX82QoFlw,15380
52
52
  modal/network_file_system.py,sha256=ZdEIRgdcR-p_ILyw_AecEtPOhhrSWJeADYCtFnhtaHM,13509
@@ -156,7 +156,7 @@ modal/experimental/__init__.py,sha256=9gkVuDmu3m4TlKoU3MzEtTOemUSs8EEOWba40s7Aa0
156
156
  modal/experimental/flash.py,sha256=C4sef08rARYFllsgtqukFmYL18SZW0_JpMS0BejDcUs,28552
157
157
  modal/experimental/flash.pyi,sha256=vV_OQhtdrPn8SW0XrBK-aLLHHIvxAzLzwFbWrke-m74,15463
158
158
  modal/experimental/ipython.py,sha256=TrCfmol9LGsRZMeDoeMPx3Hv3BFqQhYnmD_iH0pqdhk,2904
159
- modal-1.2.1.dev11.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
159
+ modal-1.2.1.dev13.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
160
160
  modal_docs/__init__.py,sha256=svYKtV8HDwDCN86zbdWqyq5T8sMdGDj0PVlzc2tIxDM,28
161
161
  modal_docs/gen_cli_docs.py,sha256=c1yfBS_x--gL5bs0N4ihMwqwX8l3IBWSkBAKNNIi6bQ,3801
162
162
  modal_docs/gen_reference_docs.py,sha256=d_CQUGQ0rfw28u75I2mov9AlS773z9rG40-yq5o7g2U,6359
@@ -184,10 +184,10 @@ modal_proto/task_command_router_pb2.py,sha256=_pD2ZpU0bNzhwBdzmLoLyLtAtftI_Agxwn
184
184
  modal_proto/task_command_router_pb2.pyi,sha256=EyDgXPLr7alqjXYERV8w_MPuO404x0uCppmSkrfE9IE,14589
185
185
  modal_proto/task_command_router_pb2_grpc.py,sha256=uEQ0HdrCp8v-9bB5yIic9muA8spCShLHY6Bz9cCgOUE,10114
186
186
  modal_proto/task_command_router_pb2_grpc.pyi,sha256=s3Yxsrawdj4nr8vqQqsAxyX6ilWaGbdECy425KKbLIA,3301
187
- modal_version/__init__.py,sha256=16uG298P5zLwLoTMMHjlqZF78qdA8tqhO7F9_yVUFvg,121
187
+ modal_version/__init__.py,sha256=BvNzpPJEyoSPk632gFDNj08JAPD46g42lR-teo7dEP8,121
188
188
  modal_version/__main__.py,sha256=2FO0yYQQwDTh6udt1h-cBnGd1c4ZyHnHSI4BksxzVac,105
189
- modal-1.2.1.dev11.dist-info/METADATA,sha256=VBI4dhCiWGWn6pJE5UqZur7MkPBsNM_0jghxFSzbz0M,2484
190
- modal-1.2.1.dev11.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
191
- modal-1.2.1.dev11.dist-info/entry_points.txt,sha256=An-wYgeEUnm6xzrAP9_NTSTSciYvvEWsMZILtYrvpAI,46
192
- modal-1.2.1.dev11.dist-info/top_level.txt,sha256=4BWzoKYREKUZ5iyPzZpjqx4G8uB5TWxXPDwibLcVa7k,43
193
- modal-1.2.1.dev11.dist-info/RECORD,,
189
+ modal-1.2.1.dev13.dist-info/METADATA,sha256=4JrQuKWcdeqkB350h27KFdy5O4gp4c4rk8EBdsBKSz0,2484
190
+ modal-1.2.1.dev13.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
191
+ modal-1.2.1.dev13.dist-info/entry_points.txt,sha256=An-wYgeEUnm6xzrAP9_NTSTSciYvvEWsMZILtYrvpAI,46
192
+ modal-1.2.1.dev13.dist-info/top_level.txt,sha256=4BWzoKYREKUZ5iyPzZpjqx4G8uB5TWxXPDwibLcVa7k,43
193
+ modal-1.2.1.dev13.dist-info/RECORD,,
modal_version/__init__.py CHANGED
@@ -1,4 +1,4 @@
1
1
  # Copyright Modal Labs 2025
2
2
  """Supplies the current version of the modal client library."""
3
3
 
4
- __version__ = "1.2.1.dev11"
4
+ __version__ = "1.2.1.dev13"