modal-1.1.1.dev38-py3-none-any.whl → modal-1.1.1.dev40-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of modal was flagged as a potentially problematic release.

modal/_functions.py CHANGED
@@ -1513,8 +1513,7 @@ Use the `Function.get_web_url()` method instead.
         else:
             count_update_callback = None
 
-        # TODO(ben-okeefe): Feature gating for input plane map until feature is enabled.
-        if self._input_plane_url and False:
+        if self._input_plane_url:
             async with aclosing(
                 _map_invocation_inputplane(
                     self,
modal/_output.py CHANGED
@@ -31,7 +31,7 @@ from rich.progress import (
 from rich.spinner import Spinner
 from rich.text import Text
 
-from modal._utils.time_utils import timestamp_to_local
+from modal._utils.time_utils import timestamp_to_localized_str
 from modal_proto import api_pb2
 
 from ._utils.grpc_utils import RETRYABLE_GRPC_STATUS_CODES, retry_transient_errors
@@ -91,7 +91,7 @@ class LineBufferedOutput:
 
         if self._show_timestamps:
             for i in range(0, len(chunks) - 1, 2):
-                chunks[i] = f"{timestamp_to_local(log.timestamp)} {chunks[i]}"
+                chunks[i] = f"{timestamp_to_localized_str(log.timestamp)} {chunks[i]}"
 
         completed_lines = "".join(chunks[:-1])
         remainder = chunks[-1]
modal/_utils/async_utils.py CHANGED
@@ -279,7 +279,9 @@ class TimestampPriorityQueue(Generic[T]):
 
     def __init__(self, maxsize: int = 0):
        self.condition = asyncio.Condition()
-        self._queue: asyncio.PriorityQueue[tuple[float, Union[T, None]]] = asyncio.PriorityQueue(maxsize=maxsize)
+        self._queue: asyncio.PriorityQueue[tuple[float, int, Union[T, None]]] = asyncio.PriorityQueue(maxsize=maxsize)
+        # Used to tiebreak items with the same timestamp that are not comparable. (eg. protos)
+        self._counter = itertools.count()
 
     async def close(self):
         await self.put(self._MAX_PRIORITY, None)
@@ -288,7 +290,7 @@ class TimestampPriorityQueue(Generic[T]):
         """
         Add an item to the queue to be processed at a specific timestamp.
         """
-        await self._queue.put((timestamp, item))
+        await self._queue.put((timestamp, next(self._counter), item))
         async with self.condition:
             self.condition.notify_all()  # notify any waiting coroutines
 
@@ -301,7 +303,7 @@ class TimestampPriorityQueue(Generic[T]):
         while self.empty():
             await self.condition.wait()
         # peek at the next item
-        timestamp, item = await self._queue.get()
+        timestamp, counter, item = await self._queue.get()
         now = time.time()
         if timestamp < now:
             return item
@@ -309,7 +311,7 @@ class TimestampPriorityQueue(Generic[T]):
             return None
         # not ready yet, calculate sleep time
         sleep_time = timestamp - now
-        self._queue.put_nowait((timestamp, item))  # put it back
+        self._queue.put_nowait((timestamp, counter, item))  # put it back
        # wait until either the timeout or a new item is added
        try:
            await asyncio.wait_for(self.condition.wait(), timeout=sleep_time)
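The counter added to `TimestampPriorityQueue` exists because heap entries that tie on the timestamp fall back to comparing the payload, and protobuf messages define no ordering. A minimal standalone sketch of the same trick using a plain `asyncio.PriorityQueue` (`Item` here is a stand-in for a proto message, not modal code):

```python
import asyncio
import itertools


class Item:
    """Stand-in for a protobuf message: no __lt__ defined, so instances are not orderable."""


async def main() -> None:
    q: asyncio.PriorityQueue = asyncio.PriorityQueue()
    counter = itertools.count()

    # Without the counter, two entries with the same timestamp would force the heap
    # to compare the Item payloads directly, which raises TypeError.
    await q.put((1.0, next(counter), Item()))
    await q.put((1.0, next(counter), Item()))  # same timestamp; the counter breaks the tie

    while not q.empty():
        ts, _, item = await q.get()
        print(ts, item)


asyncio.run(main())
```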
modal/_utils/time_utils.py CHANGED
@@ -3,13 +3,17 @@ from datetime import datetime
 from typing import Optional
 
 
-def timestamp_to_local(ts: float, isotz: bool = True) -> Optional[str]:
+def timestamp_to_localized_dt(ts: float) -> datetime:
+    locale_tz = datetime.now().astimezone().tzinfo
+    return datetime.fromtimestamp(ts, tz=locale_tz)
+
+
+def timestamp_to_localized_str(ts: float, isotz: bool = True) -> Optional[str]:
     if ts > 0:
-        locale_tz = datetime.now().astimezone().tzinfo
-        dt = datetime.fromtimestamp(ts, tz=locale_tz)
+        dt = timestamp_to_localized_dt(ts)
         if isotz:
             return dt.isoformat(sep=" ", timespec="seconds")
         else:
-            return f"{datetime.strftime(dt, '%Y-%m-%d %H:%M')} {locale_tz.tzname(dt)}"
+            return f"{dt:%Y-%m-%d %H:%M %Z}"
     else:
         return None
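For context on this change: the new helpers build a timezone-aware datetime instead of a naive one, which is what makes both the ISO form and the `%Z` form printable directly. A small stdlib-only sketch of the same behavior (illustrative, not modal code):

```python
import time
from datetime import datetime, timezone

ts = time.time()

naive = datetime.fromtimestamp(ts)  # tzinfo is None; meaning depends on the machine
local_tz = datetime.now().astimezone().tzinfo
aware = datetime.fromtimestamp(ts, tz=local_tz)  # carries the local UTC offset

print(naive.tzinfo)                                  # None
print(aware.isoformat(sep=" ", timespec="seconds"))  # e.g. "2025-01-01 12:00:00+01:00"
print(f"{aware:%Y-%m-%d %H:%M %Z}")                  # e.g. "2025-01-01 12:00 CET"
print(aware.astimezone(timezone.utc))                # aware datetimes convert safely
```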
modal/cli/app.py CHANGED
@@ -15,7 +15,7 @@ from modal.client import _Client
 from modal.environments import ensure_env
 from modal_proto import api_pb2
 
-from .._utils.time_utils import timestamp_to_local
+from .._utils.time_utils import timestamp_to_localized_str
 from .utils import ENV_OPTION, display_table, get_app_id_from_name, stream_app_logs
 
 APP_IDENTIFIER = Argument("", help="App name or ID")
@@ -71,8 +71,8 @@ async def list_(env: Optional[str] = ENV_OPTION, json: bool = False):
                 app_stats.description,
                 state,
                 str(app_stats.n_running_tasks),
-                timestamp_to_local(app_stats.created_at, json),
-                timestamp_to_local(app_stats.stopped_at, json),
+                timestamp_to_localized_str(app_stats.created_at, json),
+                timestamp_to_localized_str(app_stats.stopped_at, json),
             ]
         )
 
@@ -217,7 +217,7 @@ async def history(
 
         row = [
             Text(f"v{app_stats.version}", style=style),
-            Text(timestamp_to_local(app_stats.deployed_at, json), style=style),
+            Text(timestamp_to_localized_str(app_stats.deployed_at, json), style=style),
             Text(app_stats.client_version, style=style),
             Text(app_stats.deployed_by, style=style),
         ]
modal/cli/cluster.py CHANGED
@@ -8,7 +8,7 @@ from modal._object import _get_environment_name
 from modal._output import make_console
 from modal._pty import get_pty_info
 from modal._utils.async_utils import synchronizer
-from modal._utils.time_utils import timestamp_to_local
+from modal._utils.time_utils import timestamp_to_localized_str
 from modal.cli.utils import ENV_OPTION, display_table, is_tty
 from modal.client import _Client
 from modal.config import config
@@ -42,7 +42,7 @@ async def list_(env: Optional[str] = ENV_OPTION, json: bool = False):
             [
                 c.cluster_id,
                 c.app_id,
-                timestamp_to_local(c.started_at, json) if c.started_at else "Pending",
+                timestamp_to_localized_str(c.started_at, json) if c.started_at else "Pending",
                 str(len(c.task_ids)),
             ]
         )
modal/cli/container.py CHANGED
@@ -8,7 +8,7 @@ from modal._object import _get_environment_name
 from modal._pty import get_pty_info
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal._utils.time_utils import timestamp_to_local
+from modal._utils.time_utils import timestamp_to_localized_str
 from modal.cli.utils import ENV_OPTION, display_table, is_tty, stream_app_logs
 from modal.client import _Client
 from modal.config import config
@@ -40,7 +40,7 @@ async def list_(env: Optional[str] = ENV_OPTION, json: bool = False):
                 task_stats.task_id,
                 task_stats.app_id,
                 task_stats.app_description,
-                timestamp_to_local(task_stats.started_at, json) if task_stats.started_at else "Pending",
+                timestamp_to_localized_str(task_stats.started_at, json) if task_stats.started_at else "Pending",
             ]
         )
 
modal/cli/dict.py CHANGED
@@ -8,7 +8,7 @@ from modal._output import make_console
 from modal._resolver import Resolver
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal._utils.time_utils import timestamp_to_local
+from modal._utils.time_utils import timestamp_to_localized_str
 from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
 from modal.dict import _Dict
@@ -44,7 +44,7 @@ async def list_(*, json: bool = False, env: Optional[str] = ENV_OPTION):
     request = api_pb2.DictListRequest(environment_name=env)
     response = await retry_transient_errors(client.stub.DictList, request)
 
-    rows = [(d.name, timestamp_to_local(d.created_at, json)) for d in response.dicts]
+    rows = [(d.name, timestamp_to_localized_str(d.created_at, json)) for d in response.dicts]
     display_table(["Name", "Created at"], rows, json)
 
 
modal/cli/network_file_system.py CHANGED
@@ -16,7 +16,7 @@ from modal._location import display_location
 from modal._output import OutputManager, ProgressHandler, make_console
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal._utils.time_utils import timestamp_to_local
+from modal._utils.time_utils import timestamp_to_localized_str
 from modal.cli._download import _volume_download
 from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
@@ -44,7 +44,7 @@ async def list_(env: Optional[str] = ENV_OPTION, json: Optional[bool] = False):
             [
                 item.label,
                 display_location(item.cloud_provider),
-                timestamp_to_local(item.created_at, json),
+                timestamp_to_localized_str(item.created_at, json),
             ]
         )
     display_table(column_names, rows, json, title=f"Shared Volumes{env_part}")
modal/cli/queues.py CHANGED
@@ -8,7 +8,7 @@ from modal._output import make_console
 from modal._resolver import Resolver
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal._utils.time_utils import timestamp_to_local
+from modal._utils.time_utils import timestamp_to_localized_str
 from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
 from modal.environments import ensure_env
@@ -71,7 +71,7 @@ async def list_(*, json: bool = False, env: Optional[str] = ENV_OPTION):
     rows = [
         (
             q.name,
-            timestamp_to_local(q.created_at, json),
+            timestamp_to_localized_str(q.created_at, json),
             str(q.num_partitions),
             str(q.total_size) if q.total_size <= max_total_size else f">{max_total_size}",
         )
modal/cli/secret.py CHANGED
@@ -15,7 +15,7 @@ from typer import Argument
 from modal._output import make_console
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal._utils.time_utils import timestamp_to_local
+from modal._utils.time_utils import timestamp_to_localized_str
 from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
 from modal.environments import ensure_env
@@ -38,8 +38,8 @@ async def list_(env: Optional[str] = ENV_OPTION, json: bool = False):
         rows.append(
             [
                 item.label,
-                timestamp_to_local(item.created_at, json),
-                timestamp_to_local(item.last_used_at, json) if item.last_used_at else "-",
+                timestamp_to_localized_str(item.created_at, json),
+                timestamp_to_localized_str(item.last_used_at, json) if item.last_used_at else "-",
             ]
         )
 
modal/cli/volume.py CHANGED
@@ -14,7 +14,7 @@ import modal
 from modal._output import OutputManager, ProgressHandler, make_console
 from modal._utils.async_utils import synchronizer
 from modal._utils.grpc_utils import retry_transient_errors
-from modal._utils.time_utils import timestamp_to_local
+from modal._utils.time_utils import timestamp_to_localized_str
 from modal.cli._download import _volume_download
 from modal.cli.utils import ENV_OPTION, YES_OPTION, display_table
 from modal.client import _Client
@@ -116,7 +116,7 @@ async def list_(env: Optional[str] = ENV_OPTION, json: Optional[bool] = False):
     column_names = ["Name", "Created at"]
     rows = []
     for item in response.items:
-        rows.append([item.label, timestamp_to_local(item.created_at, json)])
+        rows.append([item.label, timestamp_to_localized_str(item.created_at, json)])
     display_table(column_names, rows, json, title=f"Volumes{env_part}")
 
 
@@ -163,7 +163,7 @@ async def ls(
             (
                 entry.path.encode("unicode_escape").decode("utf-8"),
                 filetype,
-                timestamp_to_local(entry.mtime, False),
+                timestamp_to_localized_str(entry.mtime, False),
                 humanize_filesize(entry.size),
             )
         )
modal/client.pyi CHANGED
@@ -33,7 +33,7 @@ class _Client:
         server_url: str,
         client_type: int,
         credentials: typing.Optional[tuple[str, str]],
-        version: str = "1.1.1.dev38",
+        version: str = "1.1.1.dev40",
     ):
         """mdmd:hidden
         The Modal client object is not intended to be instantiated directly by users.
@@ -164,7 +164,7 @@ class Client:
         server_url: str,
         client_type: int,
         credentials: typing.Optional[tuple[str, str]],
-        version: str = "1.1.1.dev38",
+        version: str = "1.1.1.dev40",
     ):
         """mdmd:hidden
         The Modal client object is not intended to be instantiated directly by users.
modal/dict.py CHANGED
@@ -17,6 +17,7 @@ from ._utils.async_utils import TaskContext, synchronize_api
 from ._utils.deprecation import deprecation_warning, warn_if_passing_namespace
 from ._utils.grpc_utils import retry_transient_errors
 from ._utils.name_utils import check_object_name
+from ._utils.time_utils import timestamp_to_localized_dt
 from .client import _Client
 from .config import logger
 from .exception import RequestSizeError
@@ -248,7 +249,7 @@ class _Dict(_Object, type_prefix="di"):
         creation_info = metadata.creation_info
         return DictInfo(
             name=metadata.name or None,
-            created_at=datetime.fromtimestamp(creation_info.created_at),
+            created_at=timestamp_to_localized_dt(creation_info.created_at),
             created_by=creation_info.created_by or None,
         )
 
modal/parallel_map.py CHANGED
@@ -6,7 +6,7 @@ import time
 import typing
 from asyncio import FIRST_COMPLETED
 from dataclasses import dataclass
-from typing import Any, Callable, Optional
+from typing import Any, Callable, Optional, Union
 
 from grpclib import Status
 
@@ -437,17 +437,14 @@ async def _map_invocation_inputplane(
 
     This is analogous to `_map_invocation`, but instead of the control-plane
     `FunctionMap` / `FunctionPutInputs` / `FunctionGetOutputs` RPCs it speaks
-    the input-plane protocol consisting of `MapStartOrContinue` and `MapAwait`.
-
-    The implementation purposefully ignores retry handling for now - a stub is
-    left in place so that a future change can add support for the retry path
-    without re-structuring the surrounding code.
+    the input-plane protocol consisting of `MapStartOrContinue`, `MapAwait`, and `MapCheckInputs`.
     """
 
     assert function._input_plane_url, "_map_invocation_inputplane should only be used for input-plane backed functions"
 
     input_plane_stub = await client.get_stub(function._input_plane_url)
 
+    # Required for _create_input.
     assert client.stub, "Client must be hydrated with a stub for _map_invocation_inputplane"
 
     # ------------------------------------------------------------
@@ -459,15 +456,19 @@
 
     inputs_created = 0
     outputs_completed = 0
+    successful_completions = 0
+    failed_completions = 0
+    no_context_duplicates = 0
+    stale_retry_duplicates = 0
+    already_complete_duplicates = 0
+    retried_outputs = 0
+    input_queue_size = 0
+    last_entry_id = ""
 
     # The input-plane server returns this after the first request.
-    function_call_id: str | None = None
+    function_call_id = None
     function_call_id_received = asyncio.Event()
 
-    # Map of idx -> attempt_token returned by the server. This will be needed
-    # for a future client-side retry implementation.
-    attempt_tokens: dict[int, str] = {}
-
     # Single priority queue that holds *both* fresh inputs (timestamp == now)
     # and future retries (timestamp > now).
     queue: TimestampPriorityQueue[api_pb2.MapStartOrContinueItem] = TimestampPriorityQueue()
@@ -477,11 +478,25 @@
    # any reason).
    max_inputs_outstanding = MAX_INPUTS_OUTSTANDING_DEFAULT
 
-    # ------------------------------------------------------------
-    # Helper functions
-    # ------------------------------------------------------------
+    # Input plane does not yet return a retry policy. So we currently disable retries.
+    retry_policy = api_pb2.FunctionRetryPolicy(
+        retries=0,  # Input plane does not yet return a retry policy. So only retry server failures for now.
+        initial_delay_ms=1000,
+        max_delay_ms=1000,
+        backoff_coefficient=1.0,
+    )
+    map_items_manager = _MapItemsManager(
+        retry_policy=retry_policy,
+        function_call_invocation_type=api_pb2.FUNCTION_CALL_INVOCATION_TYPE_SYNC,
+        retry_queue=queue,
+        sync_client_retries_enabled=True,
+        max_inputs_outstanding=MAX_INPUTS_OUTSTANDING_DEFAULT,
+        is_input_plane_instance=True,
+    )
 
-    def update_counters(created_delta: int = 0, completed_delta: int = 0, set_have_all_inputs: bool | None = None):
+    def update_counters(
+        created_delta: int = 0, completed_delta: int = 0, set_have_all_inputs: Union[bool, None] = None
+    ):
        nonlocal inputs_created, outputs_completed, have_all_inputs
 
        if created_delta:
@@ -511,10 +526,6 @@
         )
         return api_pb2.MapStartOrContinueItem(input=put_item)
 
-    # ------------------------------------------------------------
-    # Coroutine: drain user input iterator, upload blobs, enqueue for sending
-    # ------------------------------------------------------------
-
     async def input_iter():
         while True:
             raw_input = await raw_input_queue.get()
@@ -530,22 +541,19 @@
             await queue.put(time.time(), q_item)
 
         # All inputs have been read.
-        await queue.close()
         update_counters(set_have_all_inputs=True)
         yield
 
-    # ------------------------------------------------------------
-    # Coroutine: send queued items to the input-plane server
-    # ------------------------------------------------------------
-
     async def pump_inputs():
         nonlocal function_call_id, max_inputs_outstanding
-
         async for batch in queue_batch_iterator(queue, max_batch_size=MAP_INVOCATION_CHUNK_SIZE):
             # Convert the queued items into the proto format expected by the RPC.
             request_items: list[api_pb2.MapStartOrContinueItem] = [
                 api_pb2.MapStartOrContinueItem(input=qi.input, attempt_token=qi.attempt_token) for qi in batch
             ]
+
+            await map_items_manager.add_items_inputplane(request_items)
+
             # Build request
             request = api_pb2.MapStartOrContinueRequest(
                 function_id=function.object_id,
@@ -560,43 +568,62 @@
                 input_plane_stub.MapStartOrContinue, request, metadata=metadata
             )
 
-            # TODO(ben-okeefe): Understand if an input could be lost at this step and not registered
+            # match response items to the corresponding request item index
+            response_items_idx_tuple = [
+                (request_items[idx].input.idx, attempt_token)
+                for idx, attempt_token in enumerate(response.attempt_tokens)
+            ]
+
+            map_items_manager.handle_put_continue_response(response_items_idx_tuple)
 
             if function_call_id is None:
                 function_call_id = response.function_call_id
                 function_call_id_received.set()
                 max_inputs_outstanding = response.max_inputs_outstanding or MAX_INPUTS_OUTSTANDING_DEFAULT
-
-            # Record attempt tokens for future retries; also release semaphore slots now that the
-            # inputs are officially registered on the server.
-            for idx, attempt_token in enumerate(response.attempt_tokens):
-                # Client expects the server to return the attempt tokens in the same order as the inputs we sent.
-                attempt_tokens[request_items[idx].input.idx] = attempt_token
-
             yield
 
-    # ------------------------------------------------------------
-    # Coroutine: **stub** – retry handling will be added in the future
-    # ------------------------------------------------------------
-
-    async def retry_inputs():
-        """Temporary stub for retrying inputs. Retry handling will be added in the future."""
-
+    async def check_lost_inputs():
+        nonlocal last_entry_id  # shared with get_all_outputs
        try:
            while not map_done_event.is_set():
+                if function_call_id is None:
+                    await function_call_id_received.wait()
+                    continue
+
                await asyncio.sleep(1)
-                if False:
-                    yield
+
+                # check_inputs = [(idx, attempt_token), ...]
+                check_inputs = map_items_manager.get_input_idxs_waiting_for_output()
+                attempt_tokens = [attempt_token for _, attempt_token in check_inputs]
+                request = api_pb2.MapCheckInputsRequest(
+                    last_entry_id=last_entry_id,
+                    timeout=0,  # Non-blocking read
+                    attempt_tokens=attempt_tokens,
+                )
+
+                metadata = await client.get_input_plane_metadata(function._input_plane_region)
+                response: api_pb2.MapCheckInputsResponse = await retry_transient_errors(
+                    input_plane_stub.MapCheckInputs, request, metadata=metadata
+                )
+                check_inputs_response = [
+                    (check_inputs[resp_idx][0], response.lost[resp_idx]) for resp_idx, _ in enumerate(response.lost)
+                ]
+                # check_inputs_response = [(idx, lost: bool), ...]
+                await map_items_manager.handle_check_inputs_response(check_inputs_response)
+                yield
        except asyncio.CancelledError:
            pass
 
-    # ------------------------------------------------------------
-    # Coroutine: stream outputs via MapAwait
-    # ------------------------------------------------------------
-
     async def get_all_outputs():
-        """Continuously fetch outputs until the map is complete."""
-        last_entry_id = ""
+        nonlocal \
+            successful_completions, \
+            failed_completions, \
+            no_context_duplicates, \
+            stale_retry_duplicates, \
+            already_complete_duplicates, \
+            retried_outputs, \
+            last_entry_id
+
        while not map_done_event.is_set():
            if function_call_id is None:
                await function_call_id_received.wait()
@@ -609,21 +636,51 @@
                timeout=OUTPUTS_TIMEOUT,
            )
            metadata = await client.get_input_plane_metadata(function._input_plane_region)
-            response: api_pb2.MapAwaitResponse = await retry_transient_errors(
-                input_plane_stub.MapAwait,
-                request,
-                max_retries=20,
-                attempt_timeout=OUTPUTS_TIMEOUT + ATTEMPT_TIMEOUT_GRACE_PERIOD,
-                metadata=metadata,
+            get_response_task = asyncio.create_task(
+                retry_transient_errors(
+                    input_plane_stub.MapAwait,
+                    request,
+                    max_retries=20,
+                    attempt_timeout=OUTPUTS_TIMEOUT + ATTEMPT_TIMEOUT_GRACE_PERIOD,
+                    metadata=metadata,
+                )
            )
+            map_done_task = asyncio.create_task(map_done_event.wait())
+            try:
+                done, pending = await asyncio.wait([get_response_task, map_done_task], return_when=FIRST_COMPLETED)
+                if get_response_task in done:
+                    map_done_task.cancel()
+                    response = get_response_task.result()
+                else:
+                    assert map_done_event.is_set()
+                    # map is done - no more outputs, so return early
+                    return
+            finally:
+                # clean up tasks, in case of cancellations etc.
+                get_response_task.cancel()
+                map_done_task.cancel()
            last_entry_id = response.last_entry_id
 
            for output_item in response.outputs:
-                yield output_item
-
-                update_counters(completed_delta=1)
+                output_type = await map_items_manager.handle_get_outputs_response(output_item, int(time.time()))
+                if output_type == _OutputType.SUCCESSFUL_COMPLETION:
+                    successful_completions += 1
+                elif output_type == _OutputType.FAILED_COMPLETION:
+                    failed_completions += 1
+                elif output_type == _OutputType.RETRYING:
+                    retried_outputs += 1
+                elif output_type == _OutputType.NO_CONTEXT_DUPLICATE:
+                    no_context_duplicates += 1
+                elif output_type == _OutputType.STALE_RETRY_DUPLICATE:
+                    stale_retry_duplicates += 1
+                elif output_type == _OutputType.ALREADY_COMPLETE_DUPLICATE:
+                    already_complete_duplicates += 1
+                else:
+                    raise Exception(f"Unknown output type: {output_type}")
 
-            # The loop condition will exit when map_done_event is set from update_counters.
+                if output_type == _OutputType.SUCCESSFUL_COMPLETION or output_type == _OutputType.FAILED_COMPLETION:
+                    update_counters(completed_delta=1)
+                    yield output_item
 
    async def get_all_outputs_and_clean_up():
        try:
@@ -631,23 +688,24 @@
            async for item in stream:
                yield item
        finally:
-            # We could signal server we are done with outputs so it can clean up.
+            await queue.close()
            pass
 
-    # ------------------------------------------------------------
-    # Coroutine: convert FunctionGetOutputsItem → actual result value
-    # ------------------------------------------------------------
-
    async def fetch_output(item: api_pb2.FunctionGetOutputsItem) -> tuple[int, Any]:
        try:
-            output_val = await _process_result(item.result, item.data_format, input_plane_stub, client)
-        except Exception as exc:
+            output = await _process_result(item.result, item.data_format, input_plane_stub, client)
+        except Exception as e:
            if return_exceptions:
-                output_val = exc
+                if wrap_returned_exceptions:
+                    # Prior to client 1.0.4 there was a bug where return_exceptions would wrap
+                    # any returned exceptions in a synchronicity.UserCodeException. This adds
+                    # deprecated non-breaking compatibility bandaid for migrating away from that:
+                    output = modal.exception.UserCodeException(e)
+                else:
+                    output = e
            else:
-                raise exc
-
-        return (item.idx, output_val)
+                raise e
+        return (item.idx, output)
 
    async def poll_outputs():
        # map to store out-of-order outputs received
@@ -677,17 +735,14 @@
 
        assert len(received_outputs) == 0
 
-    # ------------------------------------------------------------
-    # Debug-logging helper
-    # ------------------------------------------------------------
    async def log_debug_stats():
        def log_stats():
            logger.debug(
-                "Map-IP stats: have_all_inputs=%s inputs_created=%d outputs_completed=%d queue_size=%d",
-                have_all_inputs,
-                inputs_created,
-                outputs_completed,
-                queue.qsize(),
+                f"Map stats:\nsuccessful_completions={successful_completions} failed_completions={failed_completions} "
+                f"no_context_duplicates={no_context_duplicates} stale_retry_duplicates={stale_retry_duplicates} "
+                f"already_complete_duplicates={already_complete_duplicates} retried_outputs={retried_outputs} "
+                f"function_call_id={function_call_id} max_inputs_outstanding={max_inputs_outstanding} "
+                f"map_items_manager_size={len(map_items_manager)} input_queue_size={input_queue_size}"
            )
 
        while True:
@@ -699,13 +754,11 @@
            log_stats()
            break
 
-    # ------------------------------------------------------------
-    # Run the four coroutines concurrently and yield results as they arrive
-    # ------------------------------------------------------------
-
    log_task = asyncio.create_task(log_debug_stats())
 
-    async with aclosing(async_merge(drain_input_generator(), pump_inputs(), poll_outputs(), retry_inputs())) as merged:
+    async with aclosing(
+        async_merge(drain_input_generator(), pump_inputs(), poll_outputs(), check_lost_inputs())
+    ) as merged:
        async for maybe_output in merged:
            if maybe_output is not None:  # ignore None sentinels
                yield maybe_output.value
@@ -1045,12 +1098,19 @@ class _MapItemContext:
     sync_client_retries_enabled: bool
     # Both these futures are strings. Omitting generic type because
     # it causes an error when running `inv protoc type-stubs`.
+    # Unused. But important, input_id is not set for inputplane invocations.
     input_id: asyncio.Future
     input_jwt: asyncio.Future
     previous_input_jwt: Optional[str]
     _event_loop: asyncio.AbstractEventLoop
 
-    def __init__(self, input: api_pb2.FunctionInput, retry_manager: RetryManager, sync_client_retries_enabled: bool):
+    def __init__(
+        self,
+        input: api_pb2.FunctionInput,
+        retry_manager: RetryManager,
+        sync_client_retries_enabled: bool,
+        is_input_plane_instance: bool = False,
+    ):
         self.state = _MapItemState.SENDING
         self.input = input
         self.retry_manager = retry_manager
@@ -1061,7 +1121,22 @@
         # a race condition where we could receive outputs before we have
         # recorded the input ID and JWT in `pending_outputs`.
         self.input_jwt = self._event_loop.create_future()
+        # Unused. But important, this is not set for inputplane invocations.
         self.input_id = self._event_loop.create_future()
+        self._is_input_plane_instance = is_input_plane_instance
+
+    def handle_map_start_or_continue_response(self, attempt_token: str):
+        if not self.input_jwt.done():
+            self.input_jwt.set_result(attempt_token)
+        else:
+            # Create a new future for the next value
+            self.input_jwt = asyncio.Future()
+            self.input_jwt.set_result(attempt_token)
+
+        # Set state to WAITING_FOR_OUTPUT only if current state is SENDING. If state is
+        # RETRYING, WAITING_TO_RETRY, or COMPLETE, then we already got the output.
+        if self.state == _MapItemState.SENDING:
+            self.state = _MapItemState.WAITING_FOR_OUTPUT
 
     def handle_put_inputs_response(self, item: api_pb2.FunctionPutInputsResponseItem):
         self.input_jwt.set_result(item.input_jwt)
@@ -1088,7 +1163,7 @@
         if self.state == _MapItemState.COMPLETE:
             logger.debug(
                 f"Received output for input marked as complete. Must be duplicate, so ignoring. "
-                f"idx={item.idx} input_id={item.input_id}, retry_count={item.retry_count}"
+                f"idx={item.idx} input_id={item.input_id} retry_count={item.retry_count}"
             )
             return _OutputType.ALREADY_COMPLETE_DUPLICATE
         # If the item's retry count doesn't match our retry count, this is probably a duplicate of an old output.
@@ -1136,7 +1211,11 @@
 
         self.state = _MapItemState.WAITING_TO_RETRY
 
-        await retry_queue.put(now_seconds + (delay_ms / 1000), item.idx)
+        if self._is_input_plane_instance:
+            retry_item = await self.create_map_start_or_continue_item(item.idx)
+            await retry_queue.put(now_seconds + delay_ms / 1_000, retry_item)
+        else:
+            await retry_queue.put(now_seconds + delay_ms / 1_000, item.idx)
 
         return _OutputType.RETRYING
 
@@ -1155,6 +1234,16 @@
         self.input_jwt.set_result(input_jwt)
         self.state = _MapItemState.WAITING_FOR_OUTPUT
 
+    async def create_map_start_or_continue_item(self, idx: int) -> api_pb2.MapStartOrContinueItem:
+        attempt_token = await self.input_jwt
+        return api_pb2.MapStartOrContinueItem(
+            input=api_pb2.FunctionPutInputsItem(
+                input=self.input,
+                idx=idx,
+            ),
+            attempt_token=attempt_token,
+        )
+
 
 class _MapItemsManager:
     def __init__(
@@ -1164,6 +1253,7 @@ class _MapItemsManager:
         retry_queue: TimestampPriorityQueue,
         sync_client_retries_enabled: bool,
         max_inputs_outstanding: int,
+        is_input_plane_instance: bool = False,
     ):
         self._retry_policy = retry_policy
         self.function_call_invocation_type = function_call_invocation_type
@@ -1174,6 +1264,7 @@ class _MapItemsManager:
         self._inputs_outstanding = asyncio.BoundedSemaphore(max_inputs_outstanding)
         self._item_context: dict[int, _MapItemContext] = {}
         self._sync_client_retries_enabled = sync_client_retries_enabled
+        self._is_input_plane_instance = is_input_plane_instance
 
     async def add_items(self, items: list[api_pb2.FunctionPutInputsItem]):
         for item in items:
@@ -1186,6 +1277,21 @@ class _MapItemsManager:
                 sync_client_retries_enabled=self._sync_client_retries_enabled,
             )
 
+    async def add_items_inputplane(self, items: list[api_pb2.MapStartOrContinueItem]):
+        for item in items:
+            # acquire semaphore to limit the number of inputs in progress
+            # (either queued to be sent, waiting for completion, or retrying)
+            if item.attempt_token != "":  # if it is a retry item
+                self._item_context[item.input.idx].state = _MapItemState.SENDING
+                continue
+            await self._inputs_outstanding.acquire()
+            self._item_context[item.input.idx] = _MapItemContext(
+                input=item.input.input,
+                retry_manager=RetryManager(self._retry_policy),
+                sync_client_retries_enabled=self._sync_client_retries_enabled,
+                is_input_plane_instance=self._is_input_plane_instance,
+            )
+
     async def prepare_items_for_retry(self, retriable_idxs: list[int]) -> list[api_pb2.FunctionRetryInputsItem]:
         return [await self._item_context[idx].prepare_item_for_retry() for idx in retriable_idxs]
 
@@ -1200,6 +1306,17 @@ class _MapItemsManager:
             if ctx.state == _MapItemState.WAITING_FOR_OUTPUT and ctx.input_jwt.done()
         ]
 
+    def get_input_idxs_waiting_for_output(self) -> list[tuple[int, str]]:
+        """
+        Returns a list of input_idxs for inputs that are waiting for output.
+        """
+        # Idx doesn't need a future because it is set by client and not server.
+        return [
+            (idx, ctx.input_jwt.result())
+            for idx, ctx in self._item_context.items()
+            if ctx.state == _MapItemState.WAITING_FOR_OUTPUT and ctx.input_jwt.done()
+        ]
+
     def _remove_item(self, item_idx: int):
         del self._item_context[item_idx]
         self._inputs_outstanding.release()
@@ -1207,6 +1324,18 @@ class _MapItemsManager:
     def get_item_context(self, item_idx: int) -> _MapItemContext:
         return self._item_context.get(item_idx)
 
+    def handle_put_continue_response(
+        self,
+        items: list[tuple[int, str]],  # idx, input_jwt
+    ):
+        for index, item in items:
+            ctx = self._item_context.get(index, None)
+            # If the context is None, then get_all_outputs() has already received a successful
+            # output, and deleted the context. This happens if FunctionGetOutputs completes
+            # before MapStartOrContinueResponse is received.
+            if ctx is not None:
+                ctx.handle_map_start_or_continue_response(item)
+
     def handle_put_inputs_response(self, items: list[api_pb2.FunctionPutInputsResponseItem]):
         for item in items:
             ctx = self._item_context.get(item.idx, None)
@@ -1226,6 +1355,16 @@ class _MapItemsManager:
             if ctx is not None:
                 ctx.handle_retry_response(input_jwt)
 
+    async def handle_check_inputs_response(self, response: list[tuple[int, bool]]):
+        for idx, lost in response:
+            ctx = self._item_context.get(idx, None)
+            if ctx is not None:
+                if lost:
+                    ctx.state = _MapItemState.WAITING_TO_RETRY
+                    retry_item = await ctx.create_map_start_or_continue_item(idx)
+                    _ = ctx.retry_manager.get_delay_ms()  # increment retry count but instant retry for lost inputs
+                    await self._retry_queue.put(time.time(), retry_item)
+
     async def handle_get_outputs_response(self, item: api_pb2.FunctionGetOutputsItem, now_seconds: int) -> _OutputType:
         ctx = self._item_context.get(item.idx, None)
         if ctx is None:
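The rewritten `get_all_outputs` races the long-polling `MapAwait` call against `map_done_event` so the generator can exit as soon as the map finishes. A self-contained sketch of that `asyncio.wait(FIRST_COMPLETED)` pattern, with `long_poll` standing in for the RPC (the names here are illustrative, not modal APIs):

```python
import asyncio


async def long_poll() -> str:
    # Stand-in for the MapAwait RPC: a slow call that eventually returns outputs.
    await asyncio.sleep(5)
    return "outputs"


async def poll_until_done(done_event: asyncio.Event) -> None:
    poll_task = asyncio.create_task(long_poll())
    done_task = asyncio.create_task(done_event.wait())
    try:
        done, _ = await asyncio.wait({poll_task, done_task}, return_when=asyncio.FIRST_COMPLETED)
        if poll_task in done:
            print(poll_task.result())
        else:
            # The map finished first; stop waiting for more outputs.
            return
    finally:
        # Always cancel whichever task is still pending.
        poll_task.cancel()
        done_task.cancel()


async def main() -> None:
    event = asyncio.Event()
    asyncio.get_running_loop().call_later(1, event.set)  # simulate the map finishing early
    await poll_until_done(event)


asyncio.run(main())
```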
modal/parallel_map.pyi CHANGED
@@ -83,11 +83,7 @@ def _map_invocation_inputplane(
 
     This is analogous to `_map_invocation`, but instead of the control-plane
     `FunctionMap` / `FunctionPutInputs` / `FunctionGetOutputs` RPCs it speaks
-    the input-plane protocol consisting of `MapStartOrContinue` and `MapAwait`.
-
-    The implementation purposefully ignores retry handling for now - a stub is
-    left in place so that a future change can add support for the retry path
-    without re-structuring the surrounding code.
+    the input-plane protocol consisting of `MapStartOrContinue`, `MapAwait`, and `MapCheckInputs`.
     """
     ...
 
@@ -281,10 +277,12 @@ class _MapItemContext:
         input: modal_proto.api_pb2.FunctionInput,
         retry_manager: modal.retries.RetryManager,
         sync_client_retries_enabled: bool,
+        is_input_plane_instance: bool = False,
     ):
         """Initialize self. See help(type(self)) for accurate signature."""
         ...
 
+    def handle_map_start_or_continue_response(self, attempt_token: str): ...
     def handle_put_inputs_response(self, item: modal_proto.api_pb2.FunctionPutInputsResponseItem): ...
     async def handle_get_outputs_response(
         self,
@@ -301,6 +299,7 @@
 
     async def prepare_item_for_retry(self) -> modal_proto.api_pb2.FunctionRetryInputsItem: ...
     def handle_retry_response(self, input_jwt: str): ...
+    async def create_map_start_or_continue_item(self, idx: int) -> modal_proto.api_pb2.MapStartOrContinueItem: ...
 
 class _MapItemsManager:
     def __init__(
@@ -310,11 +309,13 @@ class _MapItemsManager:
         retry_queue: modal._utils.async_utils.TimestampPriorityQueue,
         sync_client_retries_enabled: bool,
         max_inputs_outstanding: int,
+        is_input_plane_instance: bool = False,
     ):
         """Initialize self. See help(type(self)) for accurate signature."""
         ...
 
     async def add_items(self, items: list[modal_proto.api_pb2.FunctionPutInputsItem]): ...
+    async def add_items_inputplane(self, items: list[modal_proto.api_pb2.MapStartOrContinueItem]): ...
     async def prepare_items_for_retry(
         self, retriable_idxs: list[int]
     ) -> list[modal_proto.api_pb2.FunctionRetryInputsItem]: ...
@@ -322,10 +323,16 @@
         """Returns a list of input_jwts for inputs that are waiting for output."""
         ...
 
+    def get_input_idxs_waiting_for_output(self) -> list[tuple[int, str]]:
+        """Returns a list of input_idxs for inputs that are waiting for output."""
+        ...
+
     def _remove_item(self, item_idx: int): ...
     def get_item_context(self, item_idx: int) -> _MapItemContext: ...
+    def handle_put_continue_response(self, items: list[tuple[int, str]]): ...
     def handle_put_inputs_response(self, items: list[modal_proto.api_pb2.FunctionPutInputsResponseItem]): ...
     def handle_retry_response(self, input_jwts: list[str]): ...
+    async def handle_check_inputs_response(self, response: list[tuple[int, bool]]): ...
     async def handle_get_outputs_response(
         self, item: modal_proto.api_pb2.FunctionGetOutputsItem, now_seconds: int
     ) -> _OutputType: ...
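The new `MapCheckInputs` path boils down to: track in-flight items, ask the server which ones were lost, and re-enqueue only those for retry. A rough, self-contained sketch of that bookkeeping pattern; `InFlight` and `requeue_lost` are illustrative stand-ins, not modal's classes:

```python
import asyncio
import time
from dataclasses import dataclass
from typing import Any


@dataclass
class InFlight:
    payload: Any
    attempt_token: str = ""
    retries: int = 0


async def requeue_lost(in_flight: dict[int, InFlight], lost: list[tuple[int, bool]], queue: asyncio.Queue) -> None:
    # Mirrors the idea behind handle_check_inputs_response: anything the server
    # reports as lost goes back on the send queue for an immediate retry.
    for idx, is_lost in lost:
        item = in_flight.get(idx)
        if item is not None and is_lost:
            item.retries += 1
            await queue.put((time.time(), idx))


async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    in_flight = {0: InFlight("a", "tok-0"), 1: InFlight("b", "tok-1")}
    await requeue_lost(in_flight, [(0, False), (1, True)], queue)
    print(queue.qsize())  # 1: only the lost input is re-enqueued


asyncio.run(main())
```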
modal/queue.py CHANGED
@@ -20,6 +20,7 @@ from ._utils.async_utils import TaskContext, synchronize_api, warn_if_generator_
 from ._utils.deprecation import deprecation_warning, warn_if_passing_namespace
 from ._utils.grpc_utils import retry_transient_errors
 from ._utils.name_utils import check_object_name
+from ._utils.time_utils import timestamp_to_localized_dt
 from .client import _Client
 from .exception import InvalidError, RequestSizeError
 
@@ -260,7 +261,7 @@ class _Queue(_Object, type_prefix="qu"):
         creation_info = metadata.creation_info
         return QueueInfo(
             name=metadata.name or None,
-            created_at=datetime.fromtimestamp(creation_info.created_at),
+            created_at=timestamp_to_localized_dt(creation_info.created_at),
             created_by=creation_info.created_by or None,
         )
 
modal/secret.py CHANGED
@@ -16,6 +16,7 @@ from ._utils.async_utils import synchronize_api
 from ._utils.deprecation import deprecation_warning, warn_if_passing_namespace
 from ._utils.grpc_utils import retry_transient_errors
 from ._utils.name_utils import check_object_name
+from ._utils.time_utils import timestamp_to_localized_dt
 from .client import _Client
 from .exception import InvalidError, NotFoundError
 
@@ -299,7 +300,7 @@ class _Secret(_Object, type_prefix="st"):
         creation_info = metadata.creation_info
         return SecretInfo(
             name=metadata.name or None,
-            created_at=datetime.fromtimestamp(creation_info.created_at),
+            created_at=timestamp_to_localized_dt(creation_info.created_at),
             created_by=creation_info.created_by or None,
         )
 
modal/volume.py CHANGED
@@ -55,6 +55,7 @@ from ._utils.deprecation import deprecation_warning, warn_if_passing_namespace
 from ._utils.grpc_utils import retry_transient_errors
 from ._utils.http_utils import ClientSessionRegistry
 from ._utils.name_utils import check_object_name
+from ._utils.time_utils import timestamp_to_localized_dt
 from .client import _Client
 from .config import logger
 
@@ -364,7 +365,7 @@ class _Volume(_Object, type_prefix="vo"):
         creation_info = metadata.creation_info
         return VolumeInfo(
             name=metadata.name or None,
-            created_at=datetime.fromtimestamp(creation_info.created_at) if creation_info.created_at else None,
+            created_at=timestamp_to_localized_dt(creation_info.created_at),
             created_by=creation_info.created_by or None,
         )
 
modal-1.1.1.dev38.dist-info/METADATA → modal-1.1.1.dev40.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: modal
-Version: 1.1.1.dev38
+Version: 1.1.1.dev40
 Summary: Python client library for Modal
 Author-email: Modal Labs <support@modal.com>
 License: Apache-2.0
modal-1.1.1.dev38.dist-info/RECORD → modal-1.1.1.dev40.dist-info/RECORD RENAMED
@@ -3,11 +3,11 @@ modal/__main__.py,sha256=uBjSb_5cdlxmr9AwLkznYsW2tGEJRBjEcCGILvgR_1s,2844
 modal/_clustered_functions.py,sha256=zmrKbptRbqp4euS3LWncKaLXb8Kjj4YreusOzpEpRMk,2856
 modal/_clustered_functions.pyi,sha256=_wtFjWocGf1WgI-qYBpbJPArNkg2H9JV7BVaGgMesEQ,1103
 modal/_container_entrypoint.py,sha256=1qBMNY_E9ICC_sRCtillMxmKPsmxJl1J0_qOAG8rH-0,28288
-modal/_functions.py,sha256=MVQEkih76OLsmy-QbphuyTH1gien8hTwjGs3Jxwx6cA,82826
+modal/_functions.py,sha256=n4Tjvf2Gw4DMYw5-O4OUeTUt2GucbuK3RA1orseQJEc,82727
 modal/_ipython.py,sha256=TW1fkVOmZL3YYqdS2YlM1hqpf654Yf8ZyybHdBnlhSw,301
 modal/_location.py,sha256=joiX-0ZeutEUDTrrqLF1GHXCdVLF-rHzstocbMcd_-k,366
 modal/_object.py,sha256=nCkQeLibSuvVAEIheGaLnUfN5PIh1CGpJCnzPIXymGY,11563
-modal/_output.py,sha256=T7CRq90W09d-WD4ko7T4PBe26JNeAXE1-8HNO9xpNPI,25787
+modal/_output.py,sha256=G9CeSQEBzjhveWWEzWmYa5Uwbu4lZf8N8IFH1UM4fU0,25803
 modal/_partial_function.py,sha256=B1J4S9W-La0NHaVmY1aCuH0E3QxJHIX6ZWY5eNTQ7io,37142
 modal/_pty.py,sha256=JZfPDDpzqICZqtyPI_oMJf_9w-p_lLNuzHhwhodUXio,1329
 modal/_resolver.py,sha256=2RWvm34cNSnbv1v7izJMNZgfvpLDD6LzaBlr0lIrLnY,7364
@@ -22,7 +22,7 @@ modal/app.py,sha256=kpq4kXp7pch688y6g55QYAC10wqPTU5FXKoWPMirA3E,47899
 modal/app.pyi,sha256=-jKXlGDBWRPVsuenBhdMRqawK-L2eiJ7gHbmSblhltg,43525
 modal/call_graph.py,sha256=1g2DGcMIJvRy-xKicuf63IVE98gJSnQsr8R_NVMptNc,2581
 modal/client.py,sha256=kyAIVB3Ay-XKJizQ_1ufUFB__EagV0MLmHJpyYyJ7J0,18636
-modal/client.pyi,sha256=2gT-xgWVAmS-XdFWtLiQSI42V8JiZuRagrBadMPN9FA,15831
+modal/client.pyi,sha256=PNk58EEgTKJjy4hA9vN9sClYMF9gkjfbuFmCfJ3Ujfs,15831
 modal/cloud_bucket_mount.py,sha256=YOe9nnvSr4ZbeCn587d7_VhE9IioZYRvF9VYQTQux08,5914
 modal/cloud_bucket_mount.pyi,sha256=-qSfYAQvIoO_l2wsCCGTG5ZUwQieNKXdAO00yP1-LYU,7394
 modal/cls.py,sha256=7A0xGnugQzm8dOfnKMjLjtqekRlRtQ0jPFRYgq6xdUM,40018
@@ -30,7 +30,7 @@ modal/cls.pyi,sha256=_tZ5qrlL-ZDEcD-mf9BZkkNH5XPr4SmGTEQ-RVmqF3I,27772
 modal/config.py,sha256=FqVewLPVVR4feq_46JBENiCzqTuXKpnvQZxaeWbS39g,12009
 modal/container_process.py,sha256=XkPwNIW-iD_GB9u9yqv9q8y-i5cQ8eBbLZZ_GvEw9t8,6858
 modal/container_process.pyi,sha256=9m-st3hCUlNN1GOTctfPPvIvoLtEl7FbuGWwif5-7YU,6037
-modal/dict.py,sha256=IyhKwQPM-HX10ZT-0ouSxpM-oAWqrT5waXFHmunmtyo,15804
+modal/dict.py,sha256=IWpPQtBwR96TJN7ogpIZvL9Ge9rxY4KJ2CjkUKfWr6g,15864
 modal/dict.pyi,sha256=vUrNmCKWZqiPIQSdbMT6fCq9q1QV3qkGVdz2B_yld34,22578
 modal/environments.py,sha256=gHFNLG78bqgizpQ4w_elz27QOqmcgAonFsmLs7NjUJ4,6804
 modal/environments.pyi,sha256=9-KtrzAcUe55cCP4020lSUD7-fWS7OPakAHssq4-bro,4219
@@ -52,14 +52,14 @@ modal/network_file_system.pyi,sha256=Td_IobHr84iLo_9LZKQ4tNdUB60yjX8QWBaFiUvhfi8
 modal/object.py,sha256=bTeskuY8JFrESjU4_UL_nTwYlBQdOLmVaOX3X6EMxsg,164
 modal/object.pyi,sha256=sgbaq_d3QSmnPKg5jRbMG3dOceKs0l54kHUAhAyZKAE,6796
 modal/output.py,sha256=q4T9uHduunj4NwY-YSwkHGgjZlCXMuJbfQ5UFaAGRAc,1968
-modal/parallel_map.py,sha256=-9nS9s1jbx1Iqh_5HQRK4xTdhnXF4AGIXwT4UGJ8R78,52666
-modal/parallel_map.pyi,sha256=fCugFsGup4Cflesb10_uR-nt5_eguuvhvtvavus_F98,11186
+modal/parallel_map.py,sha256=qZjvo33YAifqCVGz-d_PCRhA70sAF01EbqxQHBAdVsg,59293
+modal/parallel_map.pyi,sha256=T2HsEJVYT0KpDy8kqGz98WgH3HnIqz4kvIhZXs7c3Dw,11724
 modal/partial_function.py,sha256=aIdlGfTjjgqY6Fpr-biCjvRU9W542_S5N2xkNN_rYGM,1127
 modal/partial_function.pyi,sha256=lqqOzZ9-QvHTDWKQ_oAYYOvsXgTOBKhO9u-RI98JbUk,13986
 modal/proxy.py,sha256=NQJJMGo-D2IfmeU0vb10WWaE4oTLcuf9jTeEJvactOg,1446
 modal/proxy.pyi,sha256=yWGWwADCRGrC2w81B7671UTH4Uv3HMZKy5vVqlJUZoA,1417
 modal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-modal/queue.py,sha256=19Xri5HzY0VgOxWMZ4y_mNco8jhNgn9klegXrSHWmGc,20306
+modal/queue.py,sha256=6oKACWiFus03vx5-cDxowxX80a1J7sAwNpBBHLJM8sk,20366
 modal/queue.pyi,sha256=Pv4OtY7j17Yb89HGKaMQRiIv0yol-aV-ZtelxQ9GrlU,28330
 modal/retries.py,sha256=IvNLDM0f_GLUDD5VgEDoN09C88yoxSrCquinAuxT1Sc,5205
 modal/runner.py,sha256=ostdzYpQb-20tlD6dIq7bpWTkZkOhjJBNuMNektqnJA,24068
@@ -69,7 +69,7 @@ modal/sandbox.py,sha256=eQd0Cf9yTFCNshnj7oH8WvecbhVIwsEsmuXB9O-REis,40927
 modal/sandbox.pyi,sha256=_ddnvZGauSRG-WelsMB5oPil8KVWb0PSvmuAzAzrLIw,41713
 modal/schedule.py,sha256=ng0g0AqNY5GQI9KhkXZQ5Wam5G42glbkqVQsNpBtbDE,3078
 modal/scheduler_placement.py,sha256=BAREdOY5HzHpzSBqt6jDVR6YC_jYfHMVqOzkyqQfngU,1235
-modal/secret.py,sha256=UaUmwYmT52VarDh92b0QzAa8_BNUAgBs-wE4eMQ6-B8,11967
+modal/secret.py,sha256=_d_OQUE1S0v_wO5Ck728dsC_1v8-B_4ku4TS4BgC4Bc,12027
 modal/secret.pyi,sha256=zcC_OM0JzIF1ccnhNvVIlL6sY3xVjq3t0s3fE1ZDDVs,9732
 modal/serving.py,sha256=3I3WBeVbzZY258u9PXBCW_dZBgypq3OhwBuTVvlgubE,4423
 modal/serving.pyi,sha256=YfixTaWikyYpwhnNxCHMZnDDQiPmV1xJ87QF91U_WGU,1924
@@ -78,7 +78,7 @@ modal/snapshot.pyi,sha256=0q83hlmWxAhDu8xwZyL5VmYh0i8Tigf7S60or2k30L8,1682
 modal/stream_type.py,sha256=A6320qoAAWhEfwOCZfGtymQTu5AfLfJXXgARqooTPvY,417
 modal/token_flow.py,sha256=GWpar0gANs71vm9Bd_Cj87UG1K3ljTURbkEjG3JLsrY,7616
 modal/token_flow.pyi,sha256=eirYjyqbRiT3GCKMIPHJPpkvBTu8WxDKqSHehWaJI_4,2533
-modal/volume.py,sha256=b3VYD-0D8rZWxoeIIYpjqm2lgxrVOjFEgTyW2btUVnw,45396
+modal/volume.py,sha256=6sMyykbz9lvwzClAOW9Pdbl9naXo9CipYf65t-eJdrs,45418
 modal/volume.pyi,sha256=lMXzeyeC85ji8g2j0Ghy1WQrk2A2J0LPVpLFpabbr6A,41933
 modal/_runtime/__init__.py,sha256=MIEP8jhXUeGq_eCjYFcqN5b1bxBM4fdk0VESpjWR0fc,28
 modal/_runtime/asgi.py,sha256=_2xSTsDD27Cit7xnMs4lzkJA2wzer2_N4Oa3BkXFzVA,22521
@@ -91,7 +91,7 @@ modal/_runtime/telemetry.py,sha256=T1RoAGyjBDr1swiM6pPsGRSITm7LI5FDK18oNXxY08U,5
 modal/_runtime/user_code_imports.py,sha256=78wJyleqY2RVibqcpbDQyfWVBVT9BjyHPeoV9WdwV5Y,17720
 modal/_utils/__init__.py,sha256=waLjl5c6IPDhSsdWAm9Bji4e2PVxamYABKAze6CHVXY,28
 modal/_utils/app_utils.py,sha256=88BT4TPLWfYAQwKTHcyzNQRHg8n9B-QE2UyJs96iV-0,108
-modal/_utils/async_utils.py,sha256=ot8NiPGZ5bRJhY5ilZyjNgx24VI-1BIpCu054oLHDf0,29556
+modal/_utils/async_utils.py,sha256=7uA4KJV7XRgak5nXZSGRE-RN1h91UOyNwK6v_ilUQMQ,29737
 modal/_utils/auth_token_manager.py,sha256=i-kfLgDd4BMAw6wouO5aKfNGHo27VAZoVOsbEWqDr2I,5252
 modal/_utils/blob_utils.py,sha256=bySVr9M7hlFzZo-u4ikovxMdcdEE8yfGOs94Zex2k4o,20913
 modal/_utils/bytes_io_segment_payload.py,sha256=vaXPq8b52-x6G2hwE7SrjS58pg_aRm7gV3bn3yjmTzQ,4261
@@ -111,7 +111,7 @@ modal/_utils/package_utils.py,sha256=LcL2olGN4xaUzu2Tbv-C-Ft9Qp6bsLxEfETOAVd-mjU
 modal/_utils/pattern_utils.py,sha256=ZUffaECfe2iYBhH6cvCB-0-UWhmEBTZEl_TwG_So3ag,6714
 modal/_utils/rand_pb_testing.py,sha256=mmVPk1rZldHwHZx0DnHTuHQlRLAiiAYdxjwEJpxvT9c,3900
 modal/_utils/shell_utils.py,sha256=hWHzv730Br2Xyj6cGPiMZ-198Z3RZuOu3pDXhFSZ22c,2157
-modal/_utils/time_utils.py,sha256=THhRz59gez8jNV1B_eNS2gJJVPPGQSFVlr1esBGQoqg,494
+modal/_utils/time_utils.py,sha256=Un_nCG9ZXPMPKK5kJayrFVl1eFckVikPyqrWtI2553M,553
 modal/_vendor/__init__.py,sha256=MIEP8jhXUeGq_eCjYFcqN5b1bxBM4fdk0VESpjWR0fc,28
 modal/_vendor/a2wsgi_wsgi.py,sha256=Q1AsjpV_Q_vzQsz_cSqmP9jWzsGsB-ARFU6vpQYml8k,21878
 modal/_vendor/cloudpickle.py,sha256=avxOIgNKqL9KyPNuIOVQzBm0D1l9ipeB4RrcUMUGmeQ,55216
@@ -127,23 +127,23 @@ modal/builder/base-images.json,sha256=JYSDAgHTl-WrV_TZW5icY-IJEnbe2eQ4CZ_KN6EOZK
 modal/cli/__init__.py,sha256=6FRleWQxBDT19y7OayO4lBOzuL6Bs9r0rLINYYYbHwQ,769
 modal/cli/_download.py,sha256=tV8JFkncTtQKh85bSguQg6AW5aRRlynf-rvyN7ruigc,4337
 modal/cli/_traceback.py,sha256=IKj9xtc6LjAxyhGJWolNIXEX3MhAIulnRqywZNOFmkU,7324
-modal/cli/app.py,sha256=Q4yoPGuNqdWMwIIbjJQflp9RvmgNQQRWBNhCg_Cvi9g,7800
-modal/cli/cluster.py,sha256=GZniNlzH1QMlHNpzGurfEdxNqwaH0BznMc4i-j7uhe4,3152
+modal/cli/app.py,sha256=rbuAG92my-1eZN0olk6p2eD4oBnyBliUsrCOUW-U-9k,7832
+modal/cli/cluster.py,sha256=8pQurDUvLP_HdSeHH5ZB6WIoDh48FR8qP9vGOtSsFXI,3168
 modal/cli/config.py,sha256=lhp2Pq4RbTDhaZJ-ZJvhrMqJj8c-WjuRX6gjE3TrvXc,1691
-modal/cli/container.py,sha256=mRYRCGsP6DiWzm3Az4W5Fcc5Tbl58zOIc62HDzS9TvQ,3703
-modal/cli/dict.py,sha256=_UiF8G2aRJwZDNSFYiHFt_Xy8K9Gtrx4qKzbRgHri5I,4639
+modal/cli/container.py,sha256=9Ti-TIZ6vjDSmn9mk9h6SRwyhkQjtwirBN18LjpLyvE,3719
+modal/cli/dict.py,sha256=XsySnxSOcfF9ZehHO3whRgFuxZGGNE_I87Hiye36wE4,4655
 modal/cli/entry_point.py,sha256=M9ZeIsYx7rxdc6XP2iOIptVzmpj39D3rU8nfW7Dc3CQ,4388
 modal/cli/environment.py,sha256=Ayddkiq9jdj3XYDJ8ZmUqFpPPH8xajYlbexRkzGtUcg,4334
 modal/cli/import_refs.py,sha256=X59Z5JwgliRO6C-cIFto2Pr7o3SwlZMKQPKA0aI4ZK4,13927
 modal/cli/launch.py,sha256=0_sBu6bv2xJEPWi-rbGS6Ri9ggnkWQvrGlgpYSUBMyY,3097
-modal/cli/network_file_system.py,sha256=hj_Cfh-xjz9uWdrXrZxxPScepaZYZjJ7aHa5B93pBGM,8110
+modal/cli/network_file_system.py,sha256=I9IqTpVfk32uKYwGd8LTldkQx6UKYrQYNZ26q7Ab5Oo,8126
 modal/cli/profile.py,sha256=r5hnA_GPe_2zwgv6n0Mi8XQXyejQgShb17yjD4dPXcw,3212
-modal/cli/queues.py,sha256=As6WXkkqWbicu52uljqbmC86woejcpTdzSjU-hYpU24,4549
+modal/cli/queues.py,sha256=6Ck7B-Eu3ZEV6oOX0GxnVMQ5k9DWrIZPYXxaHVqUhKU,4565
 modal/cli/run.py,sha256=96m6fpJKbjtva4xzJut0pxS36Z5WCMq0umpAry96im0,24946
-modal/cli/secret.py,sha256=bxp4qeooYUgKoSOgiBC86lw_5niZhw56tSzX7tuICGQ,6622
+modal/cli/secret.py,sha256=PqAvgaiIHTYDbXsKLLQcDvwQn283QnmtgfIr5jH4Prw,6646
 modal/cli/token.py,sha256=NAmQzKBfEHkcldWKeFxAVIqQBoo1RTp7_A4yc7-8qM0,1911
 modal/cli/utils.py,sha256=aUXDU9_VgcJrGaGRy4bGf4dqwKYXHCpoO27x4m_bpuo,3293
-modal/cli/volume.py,sha256=Ju9Hy6UkSe4XGThuMOGSFq9Ba-t10rx8YqXM5Po7sP0,10961
+modal/cli/volume.py,sha256=L4ryL-_yjgUyr0Zwy390DYsFCLtMxeOW2elm7J76y7w,10985
 modal/cli/programs/__init__.py,sha256=svYKtV8HDwDCN86zbdWqyq5T8sMdGDj0PVlzc2tIxDM,28
 modal/cli/programs/run_jupyter.py,sha256=44Lpvqk2l3hH-uOkmAOzw60NEsfB5uaRDWDKVshvQhs,2682
 modal/cli/programs/vscode.py,sha256=KbTAaIXyQBVCDXxXjmBHmKpgXkUw0q4R4KkJvUjCYgk,3380
@@ -151,7 +151,7 @@ modal/experimental/__init__.py,sha256=nuc7AL4r_Fs08DD5dciWFZhrV1nanwoClOfdTcudU0
 modal/experimental/flash.py,sha256=viXQumCIFp5VFsPFURdFTBTjP_QnsAi8nSWXAMmfjeQ,19744
 modal/experimental/flash.pyi,sha256=A8_qJGtGoXEzKDdHbvhmCw7oqfneFEvJQK3ZdTOvUdU,10830
 modal/experimental/ipython.py,sha256=TrCfmol9LGsRZMeDoeMPx3Hv3BFqQhYnmD_iH0pqdhk,2904
-modal-1.1.1.dev38.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
+modal-1.1.1.dev40.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
 modal_docs/__init__.py,sha256=svYKtV8HDwDCN86zbdWqyq5T8sMdGDj0PVlzc2tIxDM,28
 modal_docs/gen_cli_docs.py,sha256=c1yfBS_x--gL5bs0N4ihMwqwX8l3IBWSkBAKNNIi6bQ,3801
 modal_docs/gen_reference_docs.py,sha256=d_CQUGQ0rfw28u75I2mov9AlS773z9rG40-yq5o7g2U,6359
@@ -174,10 +174,10 @@ modal_proto/options_pb2.pyi,sha256=l7DBrbLO7q3Ir-XDkWsajm0d0TQqqrfuX54i4BMpdQg,1
 modal_proto/options_pb2_grpc.py,sha256=1oboBPFxaTEXt9Aw7EAj8gXHDCNMhZD2VXqocC9l_gk,159
 modal_proto/options_pb2_grpc.pyi,sha256=CImmhxHsYnF09iENPoe8S4J-n93jtgUYD2JPAc0yJSI,247
 modal_proto/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-modal_version/__init__.py,sha256=ZJUyKWUb4kha2XnqTQCiSenTliHEzKuLWSwDrkH8iqQ,121
+modal_version/__init__.py,sha256=yKVPZgQsYtXUtD_7xH36mRiDGgXyJAA7FA7cDCXtMQI,121
 modal_version/__main__.py,sha256=2FO0yYQQwDTh6udt1h-cBnGd1c4ZyHnHSI4BksxzVac,105
-modal-1.1.1.dev38.dist-info/METADATA,sha256=DfDzvLTc_tYh0IubHK1YY3YWgYsRtuBpsIBoDT-Ky6A,2460
-modal-1.1.1.dev38.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
-modal-1.1.1.dev38.dist-info/entry_points.txt,sha256=An-wYgeEUnm6xzrAP9_NTSTSciYvvEWsMZILtYrvpAI,46
-modal-1.1.1.dev38.dist-info/top_level.txt,sha256=4BWzoKYREKUZ5iyPzZpjqx4G8uB5TWxXPDwibLcVa7k,43
-modal-1.1.1.dev38.dist-info/RECORD,,
+modal-1.1.1.dev40.dist-info/METADATA,sha256=G_cZZm2OijpQdzkE97eYr1DIr8doI9Q1_4VSQsl37rA,2460
+modal-1.1.1.dev40.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
+modal-1.1.1.dev40.dist-info/entry_points.txt,sha256=An-wYgeEUnm6xzrAP9_NTSTSciYvvEWsMZILtYrvpAI,46
+modal-1.1.1.dev40.dist-info/top_level.txt,sha256=4BWzoKYREKUZ5iyPzZpjqx4G8uB5TWxXPDwibLcVa7k,43
+modal-1.1.1.dev40.dist-info/RECORD,,
modal_version/__init__.py CHANGED
@@ -1,4 +1,4 @@
 # Copyright Modal Labs 2025
 """Supplies the current version of the modal client library."""
 
-__version__ = "1.1.1.dev38"
+__version__ = "1.1.1.dev40"