modal 1.0.6.dev58__py3-none-any.whl → 1.2.3.dev7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (147)
  1. modal/__main__.py +3 -4
  2. modal/_billing.py +80 -0
  3. modal/_clustered_functions.py +7 -3
  4. modal/_clustered_functions.pyi +4 -2
  5. modal/_container_entrypoint.py +41 -49
  6. modal/_functions.py +424 -195
  7. modal/_grpc_client.py +171 -0
  8. modal/_load_context.py +105 -0
  9. modal/_object.py +68 -20
  10. modal/_output.py +58 -45
  11. modal/_partial_function.py +36 -11
  12. modal/_pty.py +7 -3
  13. modal/_resolver.py +21 -35
  14. modal/_runtime/asgi.py +4 -3
  15. modal/_runtime/container_io_manager.py +301 -186
  16. modal/_runtime/container_io_manager.pyi +70 -61
  17. modal/_runtime/execution_context.py +18 -2
  18. modal/_runtime/execution_context.pyi +4 -1
  19. modal/_runtime/gpu_memory_snapshot.py +170 -63
  20. modal/_runtime/user_code_imports.py +28 -58
  21. modal/_serialization.py +57 -1
  22. modal/_utils/async_utils.py +33 -12
  23. modal/_utils/auth_token_manager.py +2 -5
  24. modal/_utils/blob_utils.py +110 -53
  25. modal/_utils/function_utils.py +49 -42
  26. modal/_utils/grpc_utils.py +80 -50
  27. modal/_utils/mount_utils.py +26 -1
  28. modal/_utils/name_utils.py +17 -3
  29. modal/_utils/task_command_router_client.py +536 -0
  30. modal/_utils/time_utils.py +34 -6
  31. modal/app.py +219 -83
  32. modal/app.pyi +229 -56
  33. modal/billing.py +5 -0
  34. modal/{requirements → builder}/2025.06.txt +1 -0
  35. modal/{requirements → builder}/PREVIEW.txt +1 -0
  36. modal/cli/_download.py +19 -3
  37. modal/cli/_traceback.py +3 -2
  38. modal/cli/app.py +4 -4
  39. modal/cli/cluster.py +15 -7
  40. modal/cli/config.py +5 -3
  41. modal/cli/container.py +7 -6
  42. modal/cli/dict.py +22 -16
  43. modal/cli/entry_point.py +12 -5
  44. modal/cli/environment.py +5 -4
  45. modal/cli/import_refs.py +3 -3
  46. modal/cli/launch.py +102 -5
  47. modal/cli/network_file_system.py +9 -13
  48. modal/cli/profile.py +3 -2
  49. modal/cli/programs/launch_instance_ssh.py +94 -0
  50. modal/cli/programs/run_jupyter.py +1 -1
  51. modal/cli/programs/run_marimo.py +95 -0
  52. modal/cli/programs/vscode.py +1 -1
  53. modal/cli/queues.py +57 -26
  54. modal/cli/run.py +58 -16
  55. modal/cli/secret.py +48 -22
  56. modal/cli/utils.py +3 -4
  57. modal/cli/volume.py +28 -25
  58. modal/client.py +13 -116
  59. modal/client.pyi +9 -91
  60. modal/cloud_bucket_mount.py +5 -3
  61. modal/cloud_bucket_mount.pyi +5 -1
  62. modal/cls.py +130 -102
  63. modal/cls.pyi +45 -85
  64. modal/config.py +29 -10
  65. modal/container_process.py +291 -13
  66. modal/container_process.pyi +95 -32
  67. modal/dict.py +282 -63
  68. modal/dict.pyi +423 -73
  69. modal/environments.py +15 -27
  70. modal/environments.pyi +5 -15
  71. modal/exception.py +8 -0
  72. modal/experimental/__init__.py +143 -38
  73. modal/experimental/flash.py +247 -78
  74. modal/experimental/flash.pyi +137 -9
  75. modal/file_io.py +14 -28
  76. modal/file_io.pyi +2 -2
  77. modal/file_pattern_matcher.py +25 -16
  78. modal/functions.pyi +134 -61
  79. modal/image.py +255 -86
  80. modal/image.pyi +300 -62
  81. modal/io_streams.py +436 -126
  82. modal/io_streams.pyi +236 -171
  83. modal/mount.py +62 -157
  84. modal/mount.pyi +45 -172
  85. modal/network_file_system.py +30 -53
  86. modal/network_file_system.pyi +16 -76
  87. modal/object.pyi +42 -8
  88. modal/parallel_map.py +821 -113
  89. modal/parallel_map.pyi +134 -0
  90. modal/partial_function.pyi +4 -1
  91. modal/proxy.py +16 -7
  92. modal/proxy.pyi +10 -2
  93. modal/queue.py +263 -61
  94. modal/queue.pyi +409 -66
  95. modal/runner.py +112 -92
  96. modal/runner.pyi +45 -27
  97. modal/sandbox.py +451 -124
  98. modal/sandbox.pyi +513 -67
  99. modal/secret.py +291 -67
  100. modal/secret.pyi +425 -19
  101. modal/serving.py +7 -11
  102. modal/serving.pyi +7 -8
  103. modal/snapshot.py +11 -8
  104. modal/token_flow.py +4 -4
  105. modal/volume.py +344 -98
  106. modal/volume.pyi +464 -68
  107. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/METADATA +9 -8
  108. modal-1.2.3.dev7.dist-info/RECORD +195 -0
  109. modal_docs/mdmd/mdmd.py +11 -1
  110. modal_proto/api.proto +399 -67
  111. modal_proto/api_grpc.py +241 -1
  112. modal_proto/api_pb2.py +1395 -1000
  113. modal_proto/api_pb2.pyi +1239 -79
  114. modal_proto/api_pb2_grpc.py +499 -4
  115. modal_proto/api_pb2_grpc.pyi +162 -14
  116. modal_proto/modal_api_grpc.py +175 -160
  117. modal_proto/sandbox_router.proto +145 -0
  118. modal_proto/sandbox_router_grpc.py +105 -0
  119. modal_proto/sandbox_router_pb2.py +149 -0
  120. modal_proto/sandbox_router_pb2.pyi +333 -0
  121. modal_proto/sandbox_router_pb2_grpc.py +203 -0
  122. modal_proto/sandbox_router_pb2_grpc.pyi +75 -0
  123. modal_proto/task_command_router.proto +144 -0
  124. modal_proto/task_command_router_grpc.py +105 -0
  125. modal_proto/task_command_router_pb2.py +149 -0
  126. modal_proto/task_command_router_pb2.pyi +333 -0
  127. modal_proto/task_command_router_pb2_grpc.py +203 -0
  128. modal_proto/task_command_router_pb2_grpc.pyi +75 -0
  129. modal_version/__init__.py +1 -1
  130. modal-1.0.6.dev58.dist-info/RECORD +0 -183
  131. modal_proto/modal_options_grpc.py +0 -3
  132. modal_proto/options.proto +0 -19
  133. modal_proto/options_grpc.py +0 -3
  134. modal_proto/options_pb2.py +0 -35
  135. modal_proto/options_pb2.pyi +0 -20
  136. modal_proto/options_pb2_grpc.py +0 -4
  137. modal_proto/options_pb2_grpc.pyi +0 -7
  138. /modal/{requirements → builder}/2023.12.312.txt +0 -0
  139. /modal/{requirements → builder}/2023.12.txt +0 -0
  140. /modal/{requirements → builder}/2024.04.txt +0 -0
  141. /modal/{requirements → builder}/2024.10.txt +0 -0
  142. /modal/{requirements → builder}/README.md +0 -0
  143. /modal/{requirements → builder}/base-images.json +0 -0
  144. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/WHEEL +0 -0
  145. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/entry_points.txt +0 -0
  146. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/licenses/LICENSE +0 -0
  147. {modal-1.0.6.dev58.dist-info → modal-1.2.3.dev7.dist-info}/top_level.txt +0 -0
modal/_grpc_client.py ADDED
@@ -0,0 +1,171 @@
+ # Copyright Modal Labs 2025
+ from typing import TYPE_CHECKING, Any, Collection, Generic, Literal, Mapping, Optional, TypeVar, Union, overload
+
+ import grpclib.client
+ from google.protobuf.message import Message
+ from grpclib import GRPCError, Status
+
+ from ._traceback import suppress_tb_frames
+ from ._utils.grpc_utils import Retry, _retry_transient_errors
+ from .config import config, logger
+ from .exception import InvalidError, NotFoundError
+
+ if TYPE_CHECKING:
+     from .client import _Client
+
+
+ _Value = Union[str, bytes]
+ _MetadataLike = Union[Mapping[str, _Value], Collection[tuple[str, _Value]]]
+ RequestType = TypeVar("RequestType", bound=Message)
+ ResponseType = TypeVar("ResponseType", bound=Message)
+
+
+ class grpc_error_converter:
+     def __enter__(self):
+         pass
+
+     def __exit__(self, exc_type, exc, traceback) -> Literal[False]:
+         # skip all internal frames from grpclib
+         use_full_traceback = config.get("traceback")
+         with suppress_tb_frames(1):
+             if isinstance(exc, GRPCError):
+                 if exc.status == Status.NOT_FOUND:
+                     if use_full_traceback:
+                         raise NotFoundError(exc.message)
+                     else:
+                         raise NotFoundError(exc.message) from None  # from None to skip the grpc-internal cause
+
+                 if not use_full_traceback:
+                     # just include the frame in grpclib that actually raises the GRPCError
+                     tb = exc.__traceback__
+                     while tb.tb_next:
+                         tb = tb.tb_next
+                     exc.with_traceback(tb)
+                     raise exc from None  # from None to skip the grpc-internal cause
+                 raise exc
+
+         return False
+
+
+ _DEFAULT_RETRY = Retry()
+
+
+ class UnaryUnaryWrapper(Generic[RequestType, ResponseType]):
+     # Calls a grpclib.UnaryUnaryMethod using a specific Client instance, respecting
+     # if that client is closed etc. and possibly introducing Modal-specific retry logic
+     wrapped_method: grpclib.client.UnaryUnaryMethod[RequestType, ResponseType]
+     client: "_Client"
+
+     def __init__(
+         self,
+         wrapped_method: grpclib.client.UnaryUnaryMethod[RequestType, ResponseType],
+         client: "_Client",
+         server_url: str,
+     ):
+         self.wrapped_method = wrapped_method
+         self.client = client
+         self.server_url = server_url
+
+     @property
+     def name(self) -> str:
+         return self.wrapped_method.name
+
+     @overload
+     async def __call__(
+         self,
+         req: RequestType,
+         *,
+         retry: Retry = _DEFAULT_RETRY,
+         timeout: None = None,
+         metadata: Optional[list[tuple[str, str]]] = None,
+     ) -> ResponseType: ...
+
+     @overload
+     async def __call__(
+         self,
+         req: RequestType,
+         *,
+         retry: None,
+         timeout: Optional[float] = None,
+         metadata: Optional[list[tuple[str, str]]] = None,
+     ) -> ResponseType: ...
+
+     async def __call__(
+         self,
+         req: RequestType,
+         *,
+         retry: Optional[Retry] = _DEFAULT_RETRY,
+         timeout: Optional[float] = None,
+         metadata: Optional[list[tuple[str, str]]] = None,
+     ) -> ResponseType:
+         with suppress_tb_frames(1):
+             if timeout is not None and retry is not None:
+                 raise InvalidError("Retry must be None when timeout is set")
+
+             if retry is None:
+                 return await self.direct(req, timeout=timeout, metadata=metadata)
+
+             return await _retry_transient_errors(
+                 self,  # type: ignore
+                 req,
+                 retry=retry,
+                 metadata=metadata,
+             )
+
+     async def direct(
+         self,
+         req: RequestType,
+         *,
+         timeout: Optional[float] = None,
+         metadata: Optional[_MetadataLike] = None,
+     ) -> ResponseType:
+         from .client import _Client
+
+         if self.client._snapshotted:
+             logger.debug(f"refreshing client after snapshot for {self.name.rsplit('/', 1)[1]}")
+             self.client = await _Client.from_env()
+
+         # Note: We override the grpclib method's channel (see grpclib's code [1]). I think this is fine
+         # since grpclib's code doesn't seem to change very much, but we could also recreate the
+         # grpclib stub if we aren't comfortable with this. The downside is then we need to cache
+         # the grpclib stub so the rest of our code becomes a bit more complicated.
+         #
+         # We need to override the channel because after the process is forked or the client is
+         # snapshotted, the existing channel may be stale / unusable.
+         #
+         # [1]: https://github.com/vmagamedov/grpclib/blob/62f968a4c84e3f64e6966097574ff0a59969ea9b/grpclib/client.py#L844
+         self.wrapped_method.channel = await self.client._get_channel(self.server_url)
+         with suppress_tb_frames(1), grpc_error_converter():
+             return await self.client._call_unary(self.wrapped_method, req, timeout=timeout, metadata=metadata)
+
+
+ class UnaryStreamWrapper(Generic[RequestType, ResponseType]):
+     wrapped_method: grpclib.client.UnaryStreamMethod[RequestType, ResponseType]
+
+     def __init__(
+         self,
+         wrapped_method: grpclib.client.UnaryStreamMethod[RequestType, ResponseType],
+         client: "_Client",
+         server_url: str,
+     ):
+         self.wrapped_method = wrapped_method
+         self.client = client
+         self.server_url = server_url
+
+     @property
+     def name(self) -> str:
+         return self.wrapped_method.name
+
+     async def unary_stream(
+         self,
+         request,
+         metadata: Optional[Any] = None,
+     ):
+         from .client import _Client
+
+         if self.client._snapshotted:
+             logger.debug(f"refreshing client after snapshot for {self.name.rsplit('/', 1)[1]}")
+             self.client = await _Client.from_env()
+         self.wrapped_method.channel = await self.client._get_channel(self.server_url)
+         async for response in self.client._call_stream(self.wrapped_method, request, metadata=metadata):
+             yield response
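
The retry/timeout contract enforced in UnaryUnaryWrapper.__call__ above is easiest to see in a small usage sketch. This is illustrative only; `stub` and `SomeMethod` are hypothetical placeholders for a wrapped gRPC stub method, not real Modal APIs.

async def example(stub, request) -> None:
    # Default path: transient gRPC errors are retried via _retry_transient_errors.
    await stub.SomeMethod(request)

    # An explicit timeout requires opting out of retries; passing both raises InvalidError.
    await stub.SomeMethod(request, retry=None, timeout=5.0)
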
modal/_load_context.py ADDED
@@ -0,0 +1,105 @@
+ # Copyright Modal Labs 2025
+ from typing import Optional
+
+ from .client import _Client
+ from .config import config
+
+
+ class LoadContext:
+     """Encapsulates optional metadata values used during object loading.
+
+     This metadata is set during object construction and propagated through
+     parent-child relationships (e.g., App -> Function, Cls -> Obj -> bound methods).
+     """
+
+     _client: Optional[_Client] = None
+     _environment_name: Optional[str] = None
+     _app_id: Optional[str] = None
+
+     def __init__(
+         self,
+         *,
+         client: Optional[_Client] = None,
+         environment_name: Optional[str] = None,
+         app_id: Optional[str] = None,
+     ):
+         self._client = client
+         self._environment_name = environment_name
+         self._app_id = app_id
+
+     @property
+     def client(self) -> _Client:
+         assert self._client is not None
+         return self._client
+
+     @property
+     def environment_name(self) -> str:
+         assert self._environment_name is not None
+         return self._environment_name
+
+     @property
+     def app_id(self) -> Optional[str]:
+         return self._app_id
+
+     @classmethod
+     def empty(cls) -> "LoadContext":
+         """Create an empty LoadContext with all fields set to None.
+
+         Used when loading objects that don't have a parent context.
+         """
+         return cls(client=None, environment_name=None, app_id=None)
+
+     def merged_with(self, parent: "LoadContext") -> "LoadContext":
+         """Create a new LoadContext with parent values filling in None fields.
+
+         Returns a new LoadContext without mutating self or parent.
+         Values from self take precedence over values from parent.
+         """
+         return LoadContext(
+             client=self._client if self._client is not None else parent._client,
+             environment_name=self._environment_name if self._environment_name is not None else parent._environment_name,
+             app_id=self._app_id if self._app_id is not None else parent._app_id,
+         )  # TODO (elias): apply_defaults?
+
+     async def apply_defaults(self) -> "LoadContext":
+         """Infer default client and environment_name if not present
+
+         Returns a new instance (no in place mutation)"""
+
+         return LoadContext(
+             client=await _Client.from_env() if self._client is None else self.client,
+             environment_name=self._environment_name or config.get("environment") or "",
+             app_id=self._app_id,
+         )
+
+     def reset(self) -> "LoadContext":
+         self._client = None
+         self._environment_name = None
+         self._app_id = None
+         return self
+
+     async def in_place_upgrade(
+         self, client: Optional[_Client] = None, environment_name: Optional[str] = None, app_id: Optional[str] = None
+     ) -> "LoadContext":
+         """In-place set values if they aren't already set, or set default values
+
+         Intended for Function/Cls hydration specifically
+
+         In those cases, it's important to in-place upgrade/apply_defaults since any "sibling" of the function/cls
+         would share the load context with its parent, and the initial load context overrides may not be sufficient
+         since an `app.deploy()` etc could get arguments that set a new client etc.
+
+         E.g.
+             @app.function()
+             def f():
+                 ...
+
+             f2 = Function.with_options(...)
+
+             with app.run(client=...):  # hydrates f and f2 at this point
+                 ...
+         """
+         self._client = self._client or client or await _Client.from_env()
+         self._environment_name = self._environment_name or environment_name or config.get("environment") or ""
+         self._app_id = self._app_id or app_id
+         return self
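
To make the precedence rules in merged_with concrete, here is a small hypothetical example; `fake_client` is a stand-in object where real code would pass a Modal client instance.

from modal._load_context import LoadContext

fake_client = object()  # stand-in for a real client instance
child = LoadContext(environment_name="dev")
parent = LoadContext(client=fake_client, environment_name="main", app_id="ap-123")

merged = child.merged_with(parent)
# merged._environment_name == "dev"   (child value wins)
# merged._client is fake_client       (filled in from the parent)
# merged._app_id == "ap-123"          (filled in from the parent)
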
modal/_object.py CHANGED
@@ -10,6 +10,7 @@ from typing_extensions import Self
  
  from modal._traceback import suppress_tb_frames
  
+ from ._load_context import LoadContext
  from ._resolver import Resolver
  from ._utils.async_utils import aclosing
  from ._utils.deprecation import deprecation_warning
@@ -20,11 +21,19 @@ from .exception import ExecutionError, InvalidError
  EPHEMERAL_OBJECT_HEARTBEAT_SLEEP: int = 300
  
  
- def _get_environment_name(environment_name: Optional[str] = None, resolver: Optional[Resolver] = None) -> Optional[str]:
+ def _get_environment_name(
+     environment_name: Optional[str] = None,
+ ) -> Optional[str]:
+     """Get environment name from various sources.
+
+     Args:
+         environment_name: Explicitly provided environment name (highest priority)
+
+     Returns:
+         Environment name from first available source, or config default
+     """
      if environment_name:
          return environment_name
-     elif resolver and resolver.environment_name:
-         return resolver.environment_name
      else:
          return config.get("environment")
  
@@ -34,13 +43,14 @@ class _Object:
      _prefix_to_type: ClassVar[dict[str, type]] = {}
  
      # For constructors
-     _load: Optional[Callable[[Self, Resolver, Optional[str]], Awaitable[None]]]
-     _preload: Optional[Callable[[Self, Resolver, Optional[str]], Awaitable[None]]]
+     _load: Optional[Callable[[Self, Resolver, LoadContext, Optional[str]], Awaitable[None]]]
+     _preload: Optional[Callable[[Self, Resolver, LoadContext, Optional[str]], Awaitable[None]]]
      _rep: str
      _is_another_app: bool
      _hydrate_lazily: bool
      _deps: Optional[Callable[..., Sequence["_Object"]]]
      _deduplication_key: Optional[Callable[[], Awaitable[Hashable]]] = None
+     _load_context_overrides: LoadContext
  
      # For hydrated objects
      _object_id: Optional[str]
@@ -48,6 +58,10 @@
      _is_hydrated: bool
      _is_rehydrated: bool
  
+     # Not all object subclasses have a meaningful "name" concept
+     # So whether they expose this is a matter of having a name property
+     _name: Optional[str]
+
      @classmethod
      def __init_subclass__(cls, type_prefix: Optional[str] = None):
          super().__init_subclass__()
@@ -62,12 +76,15 @@
      def _init(
          self,
          rep: str,
-         load: Optional[Callable[[Self, Resolver, Optional[str]], Awaitable[None]]] = None,
+         load: Optional[Callable[[Self, Resolver, LoadContext, Optional[str]], Awaitable[None]]] = None,
          is_another_app: bool = False,
-         preload: Optional[Callable[[Self, Resolver, Optional[str]], Awaitable[None]]] = None,
+         preload: Optional[Callable[[Self, Resolver, LoadContext, Optional[str]], Awaitable[None]]] = None,
          hydrate_lazily: bool = False,
          deps: Optional[Callable[..., Sequence["_Object"]]] = None,
          deduplication_key: Optional[Callable[[], Awaitable[Hashable]]] = None,
+         name: Optional[str] = None,
+         *,
+         load_context_overrides: Optional[LoadContext] = None,
      ):
          self._local_uuid = str(uuid.uuid4())
          self._load = load
@@ -77,12 +94,17 @@
          self._hydrate_lazily = hydrate_lazily
          self._deps = deps
          self._deduplication_key = deduplication_key
+         self._load_context_overrides = (
+             load_context_overrides if load_context_overrides is not None else LoadContext.empty()
+         )
  
          self._object_id = None
          self._client = None
          self._is_hydrated = False
          self._is_rehydrated = False
  
+         self._name = name
+
          self._initialize_from_empty()
  
      def _unhydrate(self):
@@ -156,17 +178,30 @@
      @classmethod
      def _from_loader(
          cls,
-         load: Callable[[Self, Resolver, Optional[str]], Awaitable[None]],
+         load: Callable[[Self, Resolver, LoadContext, Optional[str]], Awaitable[None]],
          rep: str,
          is_another_app: bool = False,
-         preload: Optional[Callable[[Self, Resolver, Optional[str]], Awaitable[None]]] = None,
+         preload: Optional[Callable[[Self, Resolver, LoadContext, Optional[str]], Awaitable[None]]] = None,
          hydrate_lazily: bool = False,
          deps: Optional[Callable[..., Sequence["_Object"]]] = None,
          deduplication_key: Optional[Callable[[], Awaitable[Hashable]]] = None,
+         name: Optional[str] = None,
+         *,
+         load_context_overrides: LoadContext,
      ):
          # TODO(erikbern): flip the order of the two first arguments
          obj = _Object.__new__(cls)
-         obj._init(rep, load, is_another_app, preload, hydrate_lazily, deps, deduplication_key)
+         obj._init(
+             rep,
+             load,
+             is_another_app,
+             preload,
+             hydrate_lazily,
+             deps,
+             deduplication_key,
+             name,
+             load_context_overrides=load_context_overrides,
+         )
          return obj
  
      @staticmethod
@@ -183,9 +218,20 @@
      def _is_id_type(cls, object_id) -> bool:
          return cls._get_type_from_id(object_id) == cls
  
+     @classmethod
+     def _repr(cls, name: str, environment_name: Optional[str] = None) -> str:
+         public_cls = cls.__name__.strip("_")
+         environment_repr = f", environment_name={environment_name!r}" if environment_name else ""
+         return f"modal.{public_cls}.from_name({name!r}{environment_repr})"
+
      @classmethod
      def _new_hydrated(
-         cls, object_id: str, client: _Client, handle_metadata: Optional[Message], is_another_app: bool = False
+         cls,
+         object_id: str,
+         client: _Client,
+         handle_metadata: Optional[Message],
+         is_another_app: bool = False,
+         rep: Optional[str] = None,
      ) -> Self:
          obj_cls: type[Self]
          if cls._type_prefix is not None:
@@ -202,7 +248,7 @@
  
          # Instantiate provider
          obj = _Object.__new__(obj_cls)
-         rep = f"Object({object_id})"  # TODO(erikbern): dumb
+         rep = rep or f"modal.{obj_cls.__name__.strip('_')}.from_id({object_id!r})"
          obj._init(rep, is_another_app=is_another_app)
          obj._hydrate(object_id, client, handle_metadata)
  
@@ -256,25 +302,27 @@
  
          *Added in v0.72.39*: This method replaces the deprecated `.resolve()` method.
          """
+         # TODO: add deprecation for the client argument here - should be added in constructors instead
          if self._is_hydrated:
              if self.client._snapshotted and not self._is_rehydrated:
                  # memory snapshots capture references which must be rehydrated
                  # on restore to handle staleness.
                  logger.debug(f"rehydrating {self} after snapshot")
                  self._is_hydrated = False  # un-hydrate and re-resolve
-                 c = client if client is not None else await _Client.from_env()
-                 resolver = Resolver(c)
-                 await resolver.load(typing.cast(_Object, self))
+                 # Set the client on LoadContext before loading
+                 root_load_context = LoadContext(client=client)
+                 resolver = Resolver()
+                 await resolver.load(typing.cast(_Object, self), root_load_context)
                  self._is_rehydrated = True
-                 logger.debug(f"rehydrated {self} with client {id(c)}")
+                 logger.debug(f"rehydrated {self} with client {id(self.client)}")
          elif not self._hydrate_lazily:
-             # TODO(michael) can remove _hydrate lazily? I think all objects support it now?
              self._validate_is_hydrated()
          else:
-             c = client if client is not None else await _Client.from_env()
-             resolver = Resolver(c)
+             # Set the client on LoadContext before loading
+             root_load_context = LoadContext(client=client)
+             resolver = Resolver()
              with suppress_tb_frames(1):  # skip this frame by default
-                 await resolver.load(self)
+                 await resolver.load(self, root_load_context)
          return self
  
  
modal/_output.py CHANGED
@@ -4,7 +4,6 @@ from __future__ import annotations
  import asyncio
  import contextlib
  import functools
- import io
  import platform
  import re
  import socket
@@ -32,11 +31,11 @@ from rich.progress import (
  from rich.spinner import Spinner
  from rich.text import Text
  
- from modal._utils.time_utils import timestamp_to_local
+ from modal._utils.time_utils import timestamp_to_localized_str
  from modal_proto import api_pb2
  
- from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors
- from ._utils.shell_utils import stream_from_stdin
+ from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, Retry
+ from ._utils.shell_utils import stream_from_stdin, write_to_fd
  from .client import _Client
  from .config import logger
  
@@ -46,6 +45,16 @@ else:
      default_spinner = "dots"
  
  
+ def make_console(*, stderr: bool = False, highlight: bool = True) -> Console:
+     """Create a rich Console tuned for Modal CLI output."""
+     return Console(
+         stderr=stderr,
+         highlight=highlight,
+         # CLI does not work with auto-detected Jupyter HTML display_data.
+         force_jupyter=False,
+     )
+
+
  class FunctionQueuingColumn(ProgressColumn):
      """Renders time elapsed, including task.completed as additional elapsed time."""
  
@@ -63,25 +72,6 @@ class FunctionQueuingColumn(ProgressColumn):
          return Text(str(delta), style="progress.elapsed")
  
  
- def download_progress_bar() -> Progress:
-     """
-     Returns a progress bar suitable for showing file download progress.
-     Requires passing a `path: str` data field for rendering.
-     """
-     return Progress(
-         TextColumn("[bold white]{task.fields[path]}", justify="right"),
-         BarColumn(bar_width=None),
-         "[progress.percentage]{task.percentage:>3.1f}%",
-         "•",
-         DownloadColumn(),
-         "•",
-         TransferSpeedColumn(),
-         "•",
-         TimeRemainingColumn(),
-         transient=True,
-     )
-
-
  class LineBufferedOutput:
      """Output stream that buffers lines and passes them to a callback."""
  
@@ -101,7 +91,7 @@
  
          if self._show_timestamps:
              for i in range(0, len(chunks) - 1, 2):
-                 chunks[i] = f"{timestamp_to_local(log.timestamp)} {chunks[i]}"
+                 chunks[i] = f"{timestamp_to_localized_str(log.timestamp)} {chunks[i]}"
          completed_lines = "".join(chunks[:-1])
          remainder = chunks[-1]
  
@@ -147,12 +137,11 @@ class OutputManager:
      def __init__(
          self,
          *,
-         stdout: io.TextIOWrapper | None = None,
          status_spinner_text: str = "Running app...",
          show_timestamps: bool = False,
      ):
-         self._stdout = stdout or sys.stdout
-         self._console = Console(file=stdout, highlight=False)
+         self._stdout = sys.stdout
+         self._console = make_console(highlight=False)
          self._task_states = {}
          self._task_progress_items = {}
          self._current_render_group = None
@@ -500,12 +489,11 @@ async def stream_pty_shell_input(client: _Client, exec_id: str, finish_event: as
      """
  
      async def _handle_input(data: bytes, message_index: int):
-         await retry_transient_errors(
-             client.stub.ContainerExecPutInput,
+         await client.stub.ContainerExecPutInput(
              api_pb2.ContainerExecPutInputRequest(
                  exec_id=exec_id, input=api_pb2.RuntimeInputMessage(message=data, message_index=message_index)
              ),
-             total_timeout=10,
+             retry=Retry(total_timeout=10),
          )
  
      async with stream_from_stdin(_handle_input, use_raw_terminal=True):
@@ -518,17 +506,32 @@ async def put_pty_content(log: api_pb2.TaskLogs, stdout):
          # because the progress spinner can't interfere with output.
  
          data = log.data.encode("utf-8")
-         written = 0
-         n_retries = 0
-         while written < len(data):
-             try:
-                 written += stdout.buffer.write(data[written:])
-                 stdout.flush()
-             except BlockingIOError:
-                 if n_retries >= 5:
-                     raise
-                 n_retries += 1
-                 await asyncio.sleep(0.1)
+         # Non-blocking terminals can fill the kernel buffer on output bursts, making flush() raise
+         # BlockingIOError (EAGAIN) and appear frozen until a key is pressed (this happened e.g. when
+         # printing large data from a pdb breakpoint). If stdout has a real fd, we await a
+         # non-blocking fd write (write_to_fd) instead.
+         fd = None
+         try:
+             if hasattr(stdout, "fileno"):
+                 fd = stdout.fileno()
+         except Exception:
+             fd = None
+
+         if fd is not None:
+             await write_to_fd(fd, data)
+         else:
+             # For streams without fileno(), use the normal write/flush path.
+             written = 0
+             n_retries = 0
+             while written < len(data):
+                 try:
+                     written += stdout.buffer.write(data[written:])
+                     stdout.flush()
+                 except BlockingIOError:
+                     if n_retries >= 5:
+                         raise
+                     n_retries += 1
+                     await asyncio.sleep(0.1)
      else:
          # `stdout` isn't always buffered (e.g. %%capture in Jupyter notebooks redirects it to
          # io.StringIO).
@@ -548,14 +551,22 @@ async def get_app_logs_loop(
      pty_shell_stdout = None
      pty_shell_finish_event: asyncio.Event | None = None
      pty_shell_task_id: str | None = None
+     pty_shell_input_task: asyncio.Task | None = None
  
      async def stop_pty_shell():
-         nonlocal pty_shell_finish_event
+         nonlocal pty_shell_finish_event, pty_shell_input_task
          if pty_shell_finish_event:
              print("\r", end="")  # move cursor to beginning of line
              pty_shell_finish_event.set()
              pty_shell_finish_event = None
-             await asyncio.sleep(0)  # yield to handle_exec_input() so it can disable raw terminal
+
+         if pty_shell_input_task:
+             try:
+                 await pty_shell_input_task
+             except Exception as exc:
+                 logger.exception(f"Exception in PTY shell input task: {exc}")
+             finally:
+                 pty_shell_input_task = None
  
      async def _put_log(log_batch: api_pb2.TaskLogsBatch, log: api_pb2.TaskLogs):
          if log.task_state:
@@ -583,7 +594,7 @@
  
      async def _get_logs():
          nonlocal last_log_batch_entry_id
-         nonlocal pty_shell_stdout, pty_shell_finish_event, pty_shell_task_id
+         nonlocal pty_shell_stdout, pty_shell_finish_event, pty_shell_task_id, pty_shell_input_task
  
          request = api_pb2.AppGetLogsRequest(
              app_id=app_id or "",
@@ -618,7 +629,9 @@
                      pty_shell_finish_event = asyncio.Event()
                      pty_shell_task_id = log_batch.task_id
                      output_mgr.disable()
-                     asyncio.create_task(stream_pty_shell_input(client, log_batch.pty_exec_id, pty_shell_finish_event))
+                     pty_shell_input_task = asyncio.create_task(
+                         stream_pty_shell_input(client, log_batch.pty_exec_id, pty_shell_finish_event)
+                     )
                  else:
                      for log in log_batch.items:
                          await _put_log(log_batch, log)
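
The put_pty_content change above relies on awaiting a non-blocking fd write rather than spinning on BlockingIOError. The sketch below is not Modal's write_to_fd implementation; it is an illustration of the underlying technique using only the standard library, with write_all_to_fd as a hypothetical name.

import asyncio
import os


async def write_all_to_fd(fd: int, data: bytes) -> None:
    loop = asyncio.get_running_loop()
    view = memoryview(data)
    while view:
        try:
            written = os.write(fd, view)
            view = view[written:]
        except BlockingIOError:
            # Kernel buffer is full: wait until the fd is writable again, then retry.
            writable = loop.create_future()

            def _on_writable() -> None:
                if not writable.done():
                    writable.set_result(None)

            loop.add_writer(fd, _on_writable)
            try:
                await writable
            finally:
                loop.remove_writer(fd)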