coiled 1.118.4.dev6__py3-none-any.whl → 1.129.3.dev10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
coiled/software_utils.py CHANGED
@@ -55,6 +55,7 @@ subdir_datas = {}
 
 ANY_AVAILABLE = "ANY-AVAILABLE"
 AUTH_BEARER_USERNAME = "AUTH_BEARER_TOKEN"
+CONDA_TOKEN_USERNAME = "CONDA_TOKEN"
 COILED_LOCAL_PACKAGE_PREFIX = "coiled_local_"
 DEFAULT_JSON_PYPI_URL = "https://pypi.org/pypi"
 DEFAULT_PYPI_URL = "https://pypi.org/simple"
@@ -571,9 +572,14 @@ def get_mamba_auth_dict(home_dir: Path | None = None) -> dict[str, tuple[str, str]]:
     if auth_file.exists():
         with auth_file.open("r") as f:
             auth_data = json.load(f)
+        if not isinstance(auth_data, dict):
+            logger.debug(f"Mamba auth file {auth_file} does not contain a dictionary at top level")
+            return domain_auth
         for domain, auth in auth_data.items():
             auth_type = auth.get("type")
-            if auth_type == "CondaToken" or auth_type == "BearerToken":
+            if auth_type == "CondaToken":
+                domain_auth[domain] = (CONDA_TOKEN_USERNAME, auth["token"])
+            elif auth_type == "BearerToken":
                 domain_auth[domain] = (AUTH_BEARER_USERNAME, auth["token"])
             elif auth_type == "BasicHTTPAuthentication":
                 domain_auth[domain] = (
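The practical effect of splitting the `CondaToken` branch: conda tokens now get the `CONDA_TOKEN` sentinel username, so `set_auth_for_url` (further down in this file) can embed them in the URL path rather than in HTTP basic auth. A minimal sketch, assuming a mamba auth file with hypothetical domains and token values:

```python
from coiled.software_utils import get_mamba_auth

# Hypothetical mamba auth entries (values are placeholders):
#   {"conda.anaconda.org": {"type": "CondaToken", "token": "xy-123"},
#    "repo.example.com": {"type": "BearerToken", "token": "abc"}}
get_mamba_auth("conda.anaconda.org")  # -> ("CONDA_TOKEN", "xy-123")
get_mamba_auth("repo.example.com")    # -> ("AUTH_BEARER_TOKEN", "abc")
```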
@@ -592,6 +598,129 @@ def get_mamba_auth(netloc: str) -> tuple[str, str] | None:
     return get_mamba_auth_dict().get(netloc, None)
 
 
+def _parse_rattler_auth_data(auth_data: dict) -> tuple[str, str] | None:
+    """Parse rattler authentication data into username/password tuple.
+
+    Handles rattler Authentication variants:
+    - {"BearerToken": "token"}
+    - {"CondaToken": "token"}
+    - {"BasicHTTP": {"username": "user", "password": "pass"}}
+
+    Returns:
+        Tuple of (username, password) or None if unknown auth type
+    """
+    if "BearerToken" in auth_data:
+        return (AUTH_BEARER_USERNAME, auth_data["BearerToken"])
+    elif "CondaToken" in auth_data:
+        return (CONDA_TOKEN_USERNAME, auth_data["CondaToken"])
+    elif "BasicHTTP" in auth_data:
+        basic_auth = auth_data["BasicHTTP"]
+        return (
+            basic_auth.get("username", ""),
+            basic_auth.get("password", ""),
+        )
+    else:
+        return None
+
+
+@functools.lru_cache
+def get_rattler_auth_dict(home_dir: Path | None = None) -> dict[str, tuple[str, str]]:
+    # RATTLER_AUTH_FILE is an env var that can override the default location
+    env_auth_file = os.environ.get("RATTLER_AUTH_FILE")
+    if env_auth_file:
+        auth_file = Path(env_auth_file)
+    else:
+        if home_dir is None:
+            home_dir = Path.home()
+        auth_file = home_dir / ".rattler" / "credentials.json"
+    domain_auth = {}
+    if auth_file.exists():
+        with auth_file.open("r") as f:
+            auth_data = json.load(f)
+        if not isinstance(auth_data, dict):
+            logger.debug(f"Rattler auth file {auth_file} does not contain a dictionary at top level")
+            return domain_auth
+        for domain, auth in auth_data.items():
+            parsed_auth = _parse_rattler_auth_data(auth)
+            if parsed_auth:
+                domain_auth[domain] = parsed_auth
+            else:
+                logger.debug(f"Encountered unknown rattler auth type {list(auth.keys())} for domain {domain}")
+    return domain_auth
+
+
+def get_rattler_keyring_auth(netloc: str) -> tuple[str, str] | None:
+    """Returns the Requests tuple auth for a given domain from rattler keyring storage."""
+    if not HAVE_KEYRING:
+        logger.debug("keyring not available, skipping rattler keyring auth")
+        return None
+
+    def try_keyring_auth(host: str) -> tuple[str, str] | None:
+        """Try to get auth from keyring for a specific host using rattler's storage format."""
+        try:
+            # Use the existing get_keyring_auth function with "rattler" as the URL
+            # and the host as the username to get rattler-stored credentials
+            auth_parts = get_keyring_auth("rattler", host)
+            if auth_parts:
+                username, password = auth_parts
+                if password:
+                    try:
+                        auth_data = json.loads(password)
+                        parsed_auth = _parse_rattler_auth_data(auth_data)
+                        if parsed_auth:
+                            return parsed_auth
+                    except json.JSONDecodeError:
+                        # If it's not JSON, treat it as a simple username/password
+                        return (username or host, password)
+
+        except Exception as e:
+            logger.debug(f"Error getting rattler keyring auth for {host}: {e}")
+            return None
+
+        return None
+
+    # Try exact match first
+    auth_parts = try_keyring_auth(netloc)
+    if auth_parts:
+        logger.debug(f"Found rattler keyring auth for {netloc}")
+        return auth_parts
+
+    # Try parent domain matches if exact match failed
+    # If looking for foo.example.com, try example.com (but not com)
+    parts = netloc.split(".")
+    for i in range(1, len(parts) - 1):  # Stop before single TLD
+        parent_domain = ".".join(parts[i:])
+
+        auth_parts = try_keyring_auth(parent_domain)
+        if auth_parts:
+            logger.debug(f"Found rattler keyring auth for {parent_domain} (matching {netloc})")
+            return auth_parts
+
+    logger.debug(f"No rattler keyring auth found for {netloc}")
+    return None
+
+
+def get_rattler_auth(netloc: str) -> tuple[str, str] | None:
+    """Returns the Requests tuple auth for a given domain from rattler keyring or auth file."""
+    # Try keyring first (primary storage method for rattler/pixi)
+    auth_parts = get_rattler_keyring_auth(netloc)
+    if auth_parts:
+        return auth_parts
+
+    # Fall back to file-based storage
+    # rattler allows wildcards, so we have to check for exact matches first
+    auth_parts = get_rattler_auth_dict().get(netloc, None)
+    if auth_parts:
+        return auth_parts
+
+    # then check for wildcard matches in file storage
+    for domain, auth in get_rattler_auth_dict().items():
+        if domain.startswith("*.") and netloc.endswith(domain[1:]):
+            return auth
+
+    return None
+
+
 @functools.lru_cache
 def get_conda_config() -> dict:
     """Returns the current conda config as dictionary"""
@@ -706,7 +835,7 @@ print(json.dumps(auth_parts))
         return None
 
     if auth_type == "token":
-        username = AUTH_BEARER_USERNAME
+        username = CONDA_TOKEN_USERNAME
 
     if not username and not password:
         return None
@@ -743,6 +872,7 @@ def set_auth_for_url(url: Url | str) -> str:
     use_keyring = dask.config.get("coiled.package_sync.conda.cred_sources.keyring", True)
     use_conda_auth = dask.config.get("coiled.package_sync.conda.cred_sources.conda", True)
     use_mamba_auth = dask.config.get("coiled.package_sync.conda.cred_sources.mamba", True)
+    use_rattler_auth = dask.config.get("coiled.package_sync.conda.cred_sources.rattler", True)
 
     no_auth_url = parsed_url._replace(auth=None).url
     auth_parts = (
@@ -756,10 +886,12 @@ def set_auth_for_url(url: Url | str) -> str:
         or (get_conda_auth(no_auth_url) if use_conda_auth else None)
         # mamba could have URL stored by netloc/path or netloc
         or ((get_mamba_auth(f"{netloc}{path}") or get_mamba_auth(netloc)) if use_mamba_auth else None)
+        # rattler/pixi could store netloc or *.netloc in keyring or a fallback file
+        or ((get_rattler_auth(netloc) or get_rattler_auth(f"*.{netloc}")) if use_rattler_auth else None)
     )
     if auth_parts is not None:
         username, password = auth_parts
-        if username == AUTH_BEARER_USERNAME:
+        if username == CONDA_TOKEN_USERNAME:
             # If the username indicates this is a token (which only happens for mamba auth)
             # the token should be embedded directly in the URL and not in the auth portion
 
@@ -775,8 +907,11 @@ def set_auth_for_url(url: Url | str) -> str:
     elif username or password:
         parsed_url = parsed_url._replace(auth=f"{username or ''}:{password or ''}")
 
-    if username and username != AUTH_BEARER_USERNAME and not password:
-        logger.info(f"No password found for {parsed_url.url}")
+    if username and username != CONDA_TOKEN_USERNAME and not password:
+        if username == AUTH_BEARER_USERNAME:
+            logger.warning(f"No bearer token found for {parsed_url.url}")
+        else:
+            logger.info(f"No password found for {parsed_url.url}")
    elif not username:
        logger.info(f"No username or password found for {parsed_url.url}")
 
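Each credential source consulted by `set_auth_for_url` can now be toggled independently through dask config, including the new rattler source. A sketch using the config keys verbatim from the hunk above:

```python
import dask

# All four sources default to True; disabling one skips that lookup entirely.
with dask.config.set({
    "coiled.package_sync.conda.cred_sources.keyring": False,
    "coiled.package_sync.conda.cred_sources.conda": True,
    "coiled.package_sync.conda.cred_sources.mamba": True,
    "coiled.package_sync.conda.cred_sources.rattler": True,
}):
    ...  # package sync will not read keyring-stored credentials here
```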
coiled/spans.py CHANGED
@@ -20,6 +20,8 @@ def span(cluster, name: Optional[str] = None, **kwargs):
     with distributed_span(name or "") as s:
         yield
     t1 = time.time()
+    if not dask.config.get("coiled.analytics.client-spans.transmit", True):
+        return
     data = {
         **kwargs,
         "start": t0,
coiled/types.py CHANGED
@@ -182,8 +182,11 @@ KNOWN_SUBDIR_RE = re.compile(r"(?:/|^)(?:" + "|".join(KNOWN_SUBDIRS) + r")(?:/|$
 
 
 # This function is in this module to prevent circular import issues
-def parse_conda_channel(package_name: str, channel: str, subdir: str) -> Tuple[Optional[str], str]:
+def parse_conda_channel(package_name: str, channel: Optional[str], subdir: str) -> Tuple[Optional[str], str]:
     """Return a channel and channel_url for a conda package with any extra information removed."""
+    # Editable packages from pixi may not have a channel
+    if not channel:
+        return None, ""
     # Handle unknown channels
     if channel == "<unknown>":
         logger.warning(f"Channel for {package_name} is unknown, setting to conda-forge")
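A sketch of the guard's effect, with hypothetical package names:

```python
from coiled.types import parse_conda_channel

# Editable pixi packages may report no channel; the new guard returns early
# instead of hitting the "<unknown>" handling below it.
parse_conda_channel("my-editable-pkg", None, "noarch")  # -> (None, "")
parse_conda_channel("numpy", "<unknown>", "linux-64")   # falls through to the conda-forge fallback
```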
@@ -432,10 +435,24 @@ class AWSOptions(BackendOptions, total=False):
         If possible, this will attempt to put workers in the same cluster placement group (in theory this can
         result in better network between workers, since they'd be physically close to each other in datacenter,
         though we haven't seen this to have much benefit in practice).
+    use_worker_placement_group
+        Cluster placement group for only the workers, not the scheduler.
+    use_efa
+        Attach an Elastic Fabric Adapter for faster interconnect between instances.
+        Only some instance types are supported.
+    use_worker_efa
+        Attach an Elastic Fabric Adapter only on cluster workers, not the scheduler.
+    ami_version
+        Use a non-default type of AMI.
+        Supported options include "DL" for the Deep Learning Base OSS Nvidia Driver GPU AMI.
     """
 
     keypair_name: Optional[str]
     use_placement_group: Optional[bool]
+    use_worker_placement_group: Optional[bool]
+    use_efa: Optional[bool]
+    use_worker_efa: Optional[bool]
+    ami_version: Optional[str]
 
 
 class GCPOptions(BackendOptions, total=False):
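These options ride along in `backend_options`; a hypothetical cluster request using the new keys (values illustrative, not defaults):

```python
import coiled

cluster = coiled.Cluster(
    n_workers=4,
    backend_options={
        "use_worker_placement_group": True,  # placement group for workers only
        "use_efa": True,                     # EFA NICs; supported instance types only
        "ami_version": "DL",                 # Deep Learning Base OSS Nvidia Driver GPU AMI
    },
)
```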
coiled/utils.py CHANGED
@@ -54,7 +54,10 @@ import rich
 import urllib3
 import yaml
 from dask.distributed import Security
-from rich.console import Console
+from rich.align import Align
+from rich.console import Console, Group
+from rich.panel import Panel
+from rich.progress import BarColumn, Progress, TextColumn
 
 from coiled.exceptions import (
     AccountFormatError,
@@ -2120,3 +2123,64 @@ def join_command_parts(command: list[str]):
         return s
 
     return " ".join(quote_if_has_whitespace(part) for part in command)
+
+
+class SimpleRichProgressPanel(Progress):
+    """
+    Panel with one or more progress bars.
+
+    Basic usage:
+
+    ```python
+    with coiled.utils.SimpleRichProgressPanel.from_defaults(title="Doing stuff...") as progress:
+
+        while ...:
+            foo_complete = ...
+            bar_complete = ...
+
+            # the first call adds the bars; subsequent calls update the values
+            progress.update_progress([
+                {"label": "Foo", "total": 123, "completed": foo_complete},
+                {"label": "Bar", "total": 456, "completed": bar_complete},
+            ])
+    ```
+
+    """
+
+    def __init__(self, *args, batch_title: str | Group = "", **kwargs):
+        self.batch_title = batch_title
+        self._tasks_from_dicts = {}
+        super().__init__(*args, **kwargs)
+
+    def get_renderables(self):
+        yield Panel(
+            Group(
+                Align.center(self.batch_title),
+                Align.center(self.make_tasks_table(self.tasks)),
+            )
+        )
+
+    @classmethod
+    def from_defaults(cls, title=""):
+        return cls(
+            TextColumn("[progress.description]{task.description}"),
+            BarColumn(complete_style="progress.remaining"),
+            TextColumn("[progress.percentage]{task.completed}/{task.total}"),
+            console=Console(width=80),
+            batch_title=title,
+        )
+
+    def update_title(self, title):
+        self.batch_title = title
+        self.refresh()
+
+    def update_progress(self, tasks: list[dict]):
+        for task in tasks:
+            if not task:
+                continue
+            if task["label"] not in self._tasks_from_dicts:
+                self._tasks_from_dicts[task["label"]] = self.add_task(task["label"])
+
+            task_kwargs = {key: val for key, val in task.items() if key != "label"}
+            self.update(self._tasks_from_dicts[task["label"]], **task_kwargs)
+        self.refresh()
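A runnable version of the docstring's elided loop, with made-up totals:

```python
import time

from coiled.utils import SimpleRichProgressPanel

with SimpleRichProgressPanel.from_defaults(title="Doing stuff...") as progress:
    for step in range(1, 11):
        progress.update_progress([
            {"label": "Foo", "total": 10, "completed": step},
            {"label": "Bar", "total": 20, "completed": step * 2},
        ])
        time.sleep(0.1)
    progress.update_title("Done!")
```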
coiled/v2/cluster.py CHANGED
@@ -207,7 +207,10 @@ class ClusterKwargs(TypedDict, total=False):
     region: str | None
     arm: bool | None
     batch_job_container: str | None
+    scheduler_sidecars: list[dict] | None
+    worker_sidecars: list[dict] | None
     pause_on_exit: bool | None
+    filestores_to_attach: list[dict] | None
 
 
 class Cluster(DistributedCluster, Generic[IsAsynchronous]):
@@ -467,8 +470,17 @@ class Cluster(DistributedCluster, Generic[IsAsynchronous]):
         The cloud provider region in which to run the cluster.
     arm
         Use ARM instances for cluster; default is x86 (Intel) instances.
+    scheduler_sidecars
+        Optional list of additional containers to run as sidecars on the scheduler. For example,
+        ``scheduler_sidecars=[{"name": "test", "container": "foo/foo:latest", "command": "run_something"}]`` will
+        start the ``foo/foo:latest`` container with ``run_something`` as the command. Note that the VM will shut
+        itself down once the container exits, so sidecar commands are expected to be things that keep running.
+    worker_sidecars
+        Like ``scheduler_sidecars``, but run on worker VMs instead of the scheduler.
     pause_on_exit
         Pause the cluster instead of shutting it down when exiting.
+    filestores_to_attach
+        List of filestores to attach (specified as ``{"id": id, "input": True, "output": True}``, not by name).
     """
 
     _instances = weakref.WeakSet()
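A hypothetical cluster using the new keyword arguments (image, command, and filestore id are placeholders):

```python
import coiled

cluster = coiled.Cluster(
    n_workers=2,
    scheduler_sidecars=[
        # the VM shuts down when this exits, so the command should be long-running
        {"name": "metrics", "container": "example/agent:latest", "command": "agent --serve"},
    ],
    filestores_to_attach=[{"id": 123, "input": True, "output": False}],
)
```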
@@ -486,7 +498,7 @@ class Cluster(DistributedCluster, Generic[IsAsynchronous]):
         worker_vm_types: list | None = None,
         worker_cpu: Union[int, List[int]] | None = None,
         worker_memory: Union[str, List[str]] | None = None,
-        worker_disk_size: Union[int, str] | None = None,
+        worker_disk_size: int | str | None = None,
         worker_disk_throughput: int | None = None,
         worker_disk_config: dict | None = None,
         worker_gpu: Union[int, bool] | None = None,
@@ -495,7 +507,7 @@ class Cluster(DistributedCluster, Generic[IsAsynchronous]):
         scheduler_vm_types: list | None = None,
         scheduler_cpu: Union[int, List[int]] | None = None,
         scheduler_memory: Union[str, List[str]] | None = None,
-        scheduler_disk_size: int | None = None,
+        scheduler_disk_size: int | str | None = None,
         scheduler_disk_config: dict | None = None,
         scheduler_gpu: bool | None = None,
         asynchronous: bool = False,
@@ -551,7 +563,10 @@ class Cluster(DistributedCluster, Generic[IsAsynchronous]):
         arm: bool | None = None,
         batch_job_ids: List[int] | None = None,
         batch_job_container: str | None = None,
+        scheduler_sidecars: list[dict] | None = None,
+        worker_sidecars: list[dict] | None = None,
         pause_on_exit: bool | None = None,
+        filestores_to_attach: list[dict] | None = None,
     ):
         self.pause_on_exit = pause_on_exit
         self.init_time = datetime.datetime.now(tz=datetime.timezone.utc)
@@ -696,7 +711,6 @@ class Cluster(DistributedCluster, Generic[IsAsynchronous]):
             "distributed",
             "msgpack-python",
             "msgpack",
-            "pip",
             "python",
             "tornado",
         ))
@@ -759,6 +773,9 @@ class Cluster(DistributedCluster, Generic[IsAsynchronous]):
             self.extra_user_container = batch_job_container
             self.extra_user_container_ignore_entrypoint = False
 
+        self.scheduler_sidecars = scheduler_sidecars
+        self.worker_sidecars = worker_sidecars
+
         self.software_environment = software or dask.config.get("coiled.software")
         self.software_container = container or dask.config.get("coiled.container", None)
         self.software_use_entrypoint = not ignore_container_entrypoint
@@ -993,6 +1010,8 @@ class Cluster(DistributedCluster, Generic[IsAsynchronous]):
             no_client_timeout if no_client_timeout != NO_CLIENT_DEFAULT else (idle_timeout or "2 minutes")
         )
 
+        self.filestores_to_attach = filestores_to_attach
+
         if not self.asynchronous:
             # If we don't close the cluster, the user's ipython session gets spammed with
             # messages from distributed.
@@ -1599,8 +1618,11 @@ class Cluster(DistributedCluster, Generic[IsAsynchronous]):
             batch_job_ids=self.batch_job_ids,
             extra_user_container=self.extra_user_container,
             extra_user_container_ignore_entrypoint=self.extra_user_container_ignore_entrypoint,
+            scheduler_sidecars=self.scheduler_sidecars,
+            worker_sidecars=self.worker_sidecars,
             host_setup_script_content=self.host_setup_script_content,
             pause_on_exit=self.pause_on_exit,
+            filestores_to_attach=self.filestores_to_attach,
             cluster_timeout_seconds=self.cluster_timeout_seconds,
         )
         cluster_created = not cluster_existed
coiled/v2/cluster_comms.py ADDED
@@ -0,0 +1,72 @@
+from __future__ import annotations
+
+from coiled.v2.states import ProcessStateEnum
+
+
+def get_cluster_connection_info(
+    cluster_id: int,
+    cloud,
+    *,
+    use_scheduler_public_ip: bool = True,
+) -> tuple[str, dict]:
+    """
+    Get the comms info we need to connect to Dask in a running cluster.
+
+    (This is a bit of a hack. It would be nicer to have a way to tell coiled.Cluster not to
+    create, just retrieve. But Cluster is a bit hard to deal with...)
+    """
+
+    cluster_info = cloud._get_cluster_details_synced(cluster_id=cluster_id)  # type: ignore
+
+    if ProcessStateEnum(cluster_info["scheduler"]["current_state"]["state"]) != ProcessStateEnum.started:
+        scheduler_state = cluster_info["scheduler"]["current_state"]["state"]
+        raise RuntimeError(f"Cannot get security info for cluster {cluster_id}, scheduler state is {scheduler_state}")
+
+    public_ip = cluster_info["scheduler"]["instance"]["public_ip_address"]
+    private_ip = cluster_info["scheduler"]["instance"]["private_ip_address"]
+    tls_cert = cluster_info["cluster_options"]["tls_cert"]
+    tls_key = cluster_info["cluster_options"]["tls_key"]
+    scheduler_port = cluster_info["scheduler_port"]
+    dashboard_address = cluster_info["scheduler"]["dashboard_address"]
+    give_scheduler_public_ip = cluster_info["cluster_infra"]["give_scheduler_public_ip"]
+
+    private_address = f"tls://{private_ip}:{scheduler_port}"
+    public_address = f"tls://{public_ip}:{scheduler_port}"
+
+    use_public_address = give_scheduler_public_ip and use_scheduler_public_ip
+    if use_public_address:
+        if not public_ip:
+            raise RuntimeError(
+                "Your Coiled client is configured to use the public IP address, but the scheduler VM does not "
+                "have a public IP address."
+            )
+        address_to_use = public_address
+    else:
+        address_to_use = private_address
+
+    security_info = {
+        "tls_key": tls_key,
+        "tls_cert": tls_cert,
+        "dashboard_address": dashboard_address,
+        "public_address": public_address,
+        "private_address": private_address,
+        "address_to_use": address_to_use,
+    }
+
+    return address_to_use, security_info
+
+
+def get_comm_from_connection_info(address, security):
+    from distributed import rpc
+
+    from coiled.utils import GatewaySecurity
+
+    security_obj = GatewaySecurity(security["tls_key"], security["tls_cert"])
+    return rpc(address, connection_args=security_obj.get_connection_args("client"))
+
+
+def use_comm_rpc(cloud, comm, function, **kwargs):
+    async def foo():
+        await getattr(comm, function)(**kwargs)
+
+    cloud._sync(foo)
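A sketch of how these helpers chain together, assuming a default-constructed `CloudV2` and a placeholder cluster id; `"identity"` is a standard distributed scheduler handler:

```python
from coiled.v2.cluster_comms import (
    get_cluster_connection_info,
    get_comm_from_connection_info,
    use_comm_rpc,
)
from coiled.v2.core import CloudV2

cloud = CloudV2()
# Raises RuntimeError unless the scheduler is in the "started" state
address, security = get_cluster_connection_info(1234, cloud)
comm = get_comm_from_connection_info(address, security)
use_comm_rpc(cloud, comm, "identity")  # await any scheduler rpc method by name
```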
coiled/v2/core.py CHANGED
@@ -638,9 +638,12 @@ class CloudV2(OldCloud, Generic[IsAsynchronous]):
         batch_job_ids: List[int] | None = None,
         extra_user_container: str | None = None,
         extra_user_container_ignore_entrypoint: bool | None = None,
+        scheduler_sidecars: list[dict] | None = None,
+        worker_sidecars: list[dict] | None = None,
         host_setup_script_content: str | None = None,
         pause_on_exit: bool | None = None,
         cluster_timeout_seconds: int | None = None,
+        filestores_to_attach: list[dict] | None = None,
     ) -> Tuple[int, bool]:
         # TODO (Declarative): support these args, or decide not to
         # https://gitlab.com/coiled/cloud/-/issues/4305
@@ -676,9 +679,13 @@ class CloudV2(OldCloud, Generic[IsAsynchronous]):
             "batch_job_ids": batch_job_ids,
             "extra_user_container": extra_user_container,
             "extra_user_container_ignore_entrypoint": extra_user_container_ignore_entrypoint,
+            "scheduler_sidecars": scheduler_sidecars,
+            "worker_sidecars": worker_sidecars,
             "host_setup_script": host_setup_script_content,
             "pause_on_exit": pause_on_exit,
             "cluster_timeout_seconds": cluster_timeout_seconds,
+            "coiled_cloud_env_image": dask.config.get("coiled.cloud-env-image", None),
+            "filestores_to_attach": filestores_to_attach,
         }
 
         try:
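The create payload also grows a `coiled_cloud_env_image` field fed from dask config (defaulting to None); a sketch with a placeholder image tag:

```python
import dask

# When set, this value is forwarded verbatim in the cluster-create request.
dask.config.set({"coiled.cloud-env-image": "example/cloud-env:some-tag"})
```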
coiled-1.118.4.dev6.dist-info/METADATA → coiled-1.129.3.dev10.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: coiled
-Version: 1.118.4.dev6
+Version: 1.129.3.dev10
 Summary: Python client for coiled.io dask clusters
 Project-URL: Homepage, https://coiled.io
 Maintainer-email: Coiled <info@coiled.io>
coiled-1.118.4.dev6.dist-info/RECORD → coiled-1.129.3.dev10.dist-info/RECORD RENAMED
@@ -2,47 +2,51 @@ coiled/__init__.py,sha256=SslrfRlKfunoTJMCfopqezFePDDKS8LG_FhLkMMo_TE,2274
 coiled/__main__.py,sha256=4XILBmm4ChZYo7h3JzgslFU0tjQVzdX0XtYcQLhCv0w,171
 coiled/analytics.py,sha256=96CeL8KVnm3-76lvT4fNkgML0lHebaLea-YP3wW-KqM,7486
 coiled/auth.py,sha256=go7vWtCwBbwtWyNrNBxg28xBrdjrETbE-mn3KaN5Yl8,1867
-coiled/batch.py,sha256=1gsUboonjwOQjFa55VFL3pdBWhZ-Vh-fk8N8qkWNqa4,6214
-coiled/capture_environment.py,sha256=K5mNwUe8KM_l203h3oZvcZUJTrGozT-CH1GrtuPYv8U,18458
+coiled/batch.py,sha256=QH-BMlMKkjdToPbw6q0I1W1TTJIDHu24B363mUGDL2c,7102
+coiled/capture_environment.py,sha256=YYNk_T4xOcw8vmFIOcy19d5-ptDvoes3RWa7fRy0sB4,17750
 coiled/cluster.py,sha256=wwK9-SefbFBUEHJjYHXlWN3YvPcvR6XD2J-RdPCGhgc,5049
 coiled/coiled.yaml,sha256=z70xzNUy0E8b8Yt12tYYmjJDDmp-U63oUD61ccuu5N0,1037
 coiled/compatibility.py,sha256=pZAPgTnqPaPpuZ6ZmCXgm0TJNenZPLBnIq4CaohwMY4,762
 coiled/config.py,sha256=O_dIj_PJ5qIA3MGJZRvqli4ztE3oLZQ-3xnhJlAD-Ts,196
-coiled/context.py,sha256=MXWsW0swdYU-x32U7NiM0xt-t65maiEO8rvsGGeScFw,4754
+coiled/context.py,sha256=BKJ26u-eNpe1dVDL69Q4ZJAkPzptNKRuQ94ZciT-PAY,4754
 coiled/core.py,sha256=Cu6hKBXRWSztbpF8huAyU_1glnt1gacnO9vExvG-Cwo,110796
 coiled/errors.py,sha256=5aXhNXgidMm0VgPYT3MZMwlHhRE57MeSmqAJFHYaa8Y,305
 coiled/exceptions.py,sha256=jUXgmfO0LitGe8ztSmAlzb9eQV3X5c0kNO2BwtEDTYg,3099
+coiled/filestore.py,sha256=Tc594sTm2e_TGsuxYQNFO8jCyT2fxjRMVzEmNRAJdCM,17952
 coiled/function.py,sha256=pONtcTUDRr0dykhVV73AWXqU_pb-4-lvOA0tR3i_PlA,21654
-coiled/plugins.py,sha256=c6T0eVs7C1bxvHQAZFwlToCaCa13o94g_2cd871BtCY,3271
+coiled/plugins.py,sha256=w03H2Sck54QmwrVOM1BVscNiVeQsHyGm1yWNzPPPWKs,3424
 coiled/prefect.py,sha256=j1EOg7Xuw82TNRonAGEoZ3ANlwN8GM5aDXRYSjC0lnA,1497
-coiled/pypi_conda_map.py,sha256=qo2Jk1qqNl52H8nlh-JCFCRbJ-jo1YejRgyr1PhuxWw,9427
+coiled/pypi_conda_map.py,sha256=GlLqvSjqvFoEPsoIVZ7so4JH3j-Z9oHKwf77UoQ7d7s,9865
 coiled/scan.py,sha256=ghAo7TKAG7E013HJpYWbic-Kp_UUf8iu533GaBpYnS8,25760
 coiled/software.py,sha256=eh3kZ8QBuIt_SPvTy_x6TXEv87SGqOJkO4HW-LCSsas,8701
-coiled/software_utils.py,sha256=TMrNzG_B0cRL49msGvNZbWm1P6rLL1DorOrs-nMvJyc,35218
-coiled/spans.py,sha256=JRlgzO7Y1pdUUlGQbWw-Z8zgxIdfP8f0T4gmv1pOip8,1987
+coiled/software_utils.py,sha256=zXqhIopDtB-xp_eJJje1W9nfXjqmvVPMIfQUs1XSu0I,40783
+coiled/spans.py,sha256=Aq2MOX6JXaJ72XiEmymPcsefs-kID85MEw6t-kOdPWI,2078
 coiled/spark.py,sha256=kooZCZT4dLMG_AQEOlaf6gj86G3UdowDfbw-Eiq94MU,9059
-coiled/types.py,sha256=G2SAprLouhf5GpoO3KnSxRlBtUP7_cI4xc7xQK_6bfE,13873
-coiled/utils.py,sha256=TJTmNSaTlxL7tXerziqSRFxtE5DCQEILZOgRE7MQUG4,76464
+coiled/types.py,sha256=xJh5t_Kk7S-LeZnZ5C4oTtl1_el3mZuQeITz1QfPHjA,14619
+coiled/utils.py,sha256=WalMzNUbjVUJvAMgXaTTyDC0HPSM_zsHiYRHK7lmkkk,78514
 coiled/websockets.py,sha256=BaCNiOgPVtm55R_rf1TK78tzoFSKLp4z2UCW7S57iNA,5956
 coiled/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 coiled/cli/config.py,sha256=WKZkDpPYywYS665krQLqid2RgSchDOddZqBkkwBtyVk,2267
-coiled/cli/core.py,sha256=aAKsw14D0fEQgonfbxYrzoqp4-AKYe0f4kIc4M-dtvs,1068
-coiled/cli/curl.py,sha256=ixgo-qVsFgilyOZ5Klu24Msm6PLDuB8TnGiibHJNJto,1466
+coiled/cli/core.py,sha256=iZC9v_LyhDOlQwvb5p4w6ul2sfzI7aNiZFHgIWIoNns,1186
+coiled/cli/curl.py,sha256=cYDQXvc1kZNDhFMkREhZoGYh0a9Ea06tEbobK8ZBCJ8,1589
 coiled/cli/diagnostics.py,sha256=1jIeue7xLOaf7LQFsNc6NmO5yU1jqmPFpKZSKjGN4rs,394
 coiled/cli/env.py,sha256=NHh7ZSq9yfongkpFqzon1eLhnH1FwToVvkKFIhqXRBE,6932
+coiled/cli/file.py,sha256=fJmOG3YhxpxXokGYu90wpjdwkJpp1XVqPJ_iveb5ShA,3623
 coiled/cli/login.py,sha256=cByVXmMsfGEuY2TkYU_Y8zq1zVTUHAxOe_wpw2uHsBs,2242
+coiled/cli/mpi.py,sha256=mg0GakT_vwX3-PJKZYDaAIFSe8sly9GE5HFB8sy57jQ,8874
 coiled/cli/package_sync.py,sha256=lABDY20yjfLYGfPlQu8ugI-Q8doY4JtN8_0nb9PkcT4,4101
 coiled/cli/prefect.py,sha256=T-SSFey4jlA_jpEI0DqAhVIPwlt2GvBFogEqYCwwevI,302
 coiled/cli/prefect_serve.py,sha256=gemq6YOVbnBoq4k3tSaU2gFJR3aMSxXLNxH6jB8V3n8,4378
-coiled/cli/run.py,sha256=Nu2Svh7ey-qxXU5nekLjrDmVpuD_wIEN1vGpPxZLkio,30078
+coiled/cli/run.py,sha256=Lwv9n6NSU9vKRYguBbZriwoWpN-uzqvyr1kE3dctUv8,31678
 coiled/cli/sync.py,sha256=S5PzB9GSPJn3HvviOMLKVbo4ET46FlPwLYK_7sRyonQ,9726
 coiled/cli/utils.py,sha256=cp7ToFGRpUKi6iNL6BbLjzgrgeTYSX_C55lYhaKWHHA,3479
 coiled/cli/batch/__init__.py,sha256=539CnfnqqcW7ndSufTS-Ie5FGZiElMYxE0Ptu70wo8M,660
 coiled/cli/batch/list.py,sha256=lU3mXeKUHltzpdbo7Txcd64T1-XpHE-wxTFIBoTR28w,3300
 coiled/cli/batch/logs.py,sha256=CbGK5GRjtu0UKGetKY_yTcjB-3PdHy5Xf4MLksYTE8g,1947
-coiled/cli/batch/run.py,sha256=ZgYVw6ByP0WGA68T0zD0czClYeytFPPeS-PWhd0so9I,31755
+coiled/cli/batch/run.py,sha256=jywwmcJbgwfNOZt1eqZRq4a-2n3BZa4zt4fylgz2Grw,37125
 coiled/cli/batch/status.py,sha256=cSpMRC0YHFQ18y-XEXFjTOza_OTd7mtT-NX7sRSLMhk,4193
-coiled/cli/batch/wait.py,sha256=qe9Tk3ccjijVlpr8jhPaKoPdvLtA8wkyss50QA4z1Do,3559
+coiled/cli/batch/util.py,sha256=ztisJzDHpsYswrdk_hI7USUkfponI8oLhcKAa6DXJo4,1026
+coiled/cli/batch/wait.py,sha256=dEP1OH0IYteqaYU2UdrGm_vU7IDE7h3l3Cb3KBoaCCY,3879
 coiled/cli/cluster/__init__.py,sha256=dKmYCta0r2EsW0FlPu8GM_bqWl8obskEc5xM332Dw4k,716
 coiled/cli/cluster/azure_logs.py,sha256=6CPoAm5PZue56gjGL8eQBBNklebyW48OLjhLOFXL4cA,1990
 coiled/cli/cluster/better_logs.py,sha256=i0prE_clVHKxnL0403mHWY1aT68l8NwM6Vy4uc2Zecs,15168
@@ -54,7 +58,7 @@ coiled/cli/cluster/metrics.py,sha256=mzMEYNX3LvDTXMEf8tBR3qGyHNN7wOBBhiWRGMPmlMA
 coiled/cli/cluster/ssh.py,sha256=ylUw_Gpco9v4w9ktBXq1eIkVIGm4OyJ73vR681Ib-QA,12276
 coiled/cli/cluster/utils.py,sha256=APKvldmBxXaxmEmKz8rV0_W-o1plF9FKAGGsWgIQxJU,1623
 coiled/cli/hello/__init__.py,sha256=iKYsdmn4CNN5LR6vC7emMHuA9D578GkS2yRz-CTsClk,188
-coiled/cli/hello/hello.py,sha256=WlW0aowmlaswlLlsG6LnyTr9ZuZE0_cfo1MXWPMD7mo,11088
+coiled/cli/hello/hello.py,sha256=a5YWkjtZtC1sn6sQhk8C6HdjYP8exWlPcYAMd9BIMPc,11094
 coiled/cli/hello/utils.py,sha256=4hwBsv0z6p0nnWWEtPGpFRJNrWZYtdm4ZOS_UNUU7ZY,8427
 coiled/cli/hello/examples/__init__.py,sha256=YYvvsblGjJ36AbTv9B2FPM2yvzLNSMBIMeM38yVI_EM,313
 coiled/cli/hello/examples/exit.py,sha256=H-gWgjZtT2ZGwSJu1mRTu8wr0r_bC-WvIXpZlDIz-0E,178
@@ -67,33 +71,34 @@ coiled/cli/hello/scripts/nyc_parquet.py,sha256=Zu7cWgVn1ZdSYpoNB8Dh9i3nMmA916Ge8
 coiled/cli/hello/scripts/pytorch.py,sha256=aTZSHFI58D_DQ4i4yCDAdNbdg5dEU5NX_THFFNUJRKk,2264
 coiled/cli/hello/scripts/xarray_nwm.py,sha256=Gh2etfuLY7fXKDPJ6ppEy9vh_qZnlOP2vbotT6CBd7s,825
 coiled/cli/notebook/__init__.py,sha256=khKDheFK7ociEbb1ODp5oygNLBo_1K7PsL08OaEMEr0,984
-coiled/cli/notebook/notebook.py,sha256=ys6c0FlTmRx3l4sAnhmNs8xahkWXZBuwt9RiGx1wnZA,23009
+coiled/cli/notebook/notebook.py,sha256=i_XD03RK2cYeYn_TVl20Uv-kJ_2x-0Oe5iRUTm6w1Tc,23293
 coiled/cli/setup/__init__.py,sha256=BiGnIH9vXGhCFOEPuSUkitcrwAA97wTsfcwMXC0DkYg,837
 coiled/cli/setup/amp.py,sha256=_zlZtqsd_LkSF5C_G8qDm0To-t30C0Z6XKMdDzrm7qg,5039
-coiled/cli/setup/aws.py,sha256=ptGZzX5GNl-vt2BR5okWhPqW-nTaOG6EP7N1sM7SB14,64744
-coiled/cli/setup/azure.py,sha256=JNBdKyY9NTcWfr6NAb4d-QM45y6bPBuFFCsCY21equI,25288
+coiled/cli/setup/aws.py,sha256=MS4Au1AGoALeVO_VuTdq_RRzL3JzOGgpTgcM69avXU0,65885
+coiled/cli/setup/azure.py,sha256=TPYs1LPMf7MvUP9n2KiCLICVwspH-mxusMH-kVEjJoM,26739
 coiled/cli/setup/entry.py,sha256=2PKtvH_ARWt5c5qjeb7dfmJOcFTqRGoskPidNoQTiOg,2425
 coiled/cli/setup/gcp.py,sha256=i67kFRJJpDORrqkVfDu1jFseN80iDbKe1vswk6jxRI8,38817
 coiled/cli/setup/prometheus.py,sha256=ZW16-vFhdYkbbVWqO-jiY2GtpD-EREEa7bm4S8TTg1k,2256
 coiled/cli/setup/util.py,sha256=-EIV098Euu1rZ0iq_fB7Uno_UUCv7PaJSiHvyxbI_vM,611
 coiled/credentials/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 coiled/credentials/aws.py,sha256=_HHGPgDRzFNN9iLxgYV-XCR1Fzdw0ez1CI38Jt3cr1A,4615
-coiled/credentials/google.py,sha256=f7tK07gMSAlSPuwSUZAH5aCQxg94BSQWQLCSpOEUrwU,10001
+coiled/credentials/google.py,sha256=UKAbo4UoYD85kZEKQBlydLvcQnSHXHPuMDUjW4TqhKE,9222
 coiled/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 coiled/extensions/prefect/__init__.py,sha256=cZp1mqX29FrnINoQsuH6pz4z4uuOACs0mgiG9_AteAk,72
 coiled/extensions/prefect/runners.py,sha256=AcaGS1637TnqFPKnjmmLHpdzjwAsxBLDKrOF7OpfEwM,987
 coiled/extensions/prefect/workers.py,sha256=Z2VuAhTm5AjWEKyCniMZrTxqtkn3uJp3sO3bFeR2Rr0,1642
 coiled/v2/__init__.py,sha256=KaCULaAqatcsYbTbj_SQtTLocbSKZa-uQXiyCICKFRM,805
-coiled/v2/cluster.py,sha256=1emc6HVkcQMkWfWndrW2Yh55YbVLsP2RUJb5O71LQFc,146813
-coiled/v2/core.py,sha256=ZHr4LtoTZcabCJX7vDLR7WrK4eut2ASyS9A-QOjEH3I,71207
+coiled/v2/cluster.py,sha256=5-dq9Vfs7R28MEzeZW4nNxmhlNLvASDQXNglKFVtkF4,148111
+coiled/v2/cluster_comms.py,sha256=UcJWLeZlc68S0uaNd9lLKbF5uaDhYqqkdTsA0CBXYRI,2643
+coiled/v2/core.py,sha256=Bf5A_rzK3tuUqqMVAgN5vix-tX_F8AEWR2pICnG3YcA,71615
 coiled/v2/cwi_log_link.py,sha256=d4k6wRYhcdDVdhWYZIX6WL1g0lscXY0yq__H1sPUNWk,1883
 coiled/v2/states.py,sha256=VduyWuf6rByG_wg5AXTxZpe88cCTSdIa4HrPjk1jBcA,9031
 coiled/v2/widgets/__init__.py,sha256=Bt3GHTTyri-kFUaqGRVydDM-sCg5NdNujDg2RyvgV8U,983
 coiled/v2/widgets/interface.py,sha256=YeMQ5qdRbbpM04x9qIg2LE1xwxyRxFbdDYnkrwHazPk,301
 coiled/v2/widgets/rich.py,sha256=3rU5-yso92NdeEh3uSvEE-GwPNyp6i0Nb5PE5czXCik,28974
 coiled/v2/widgets/util.py,sha256=Y8qpGqwNzqfCzgyRFRy7vcscBoXqop-Upi4HLPpXLgg,3120
-coiled-1.118.4.dev6.dist-info/METADATA,sha256=MKDAAy7B9Vk6Ke2gaWWoNDB5WoJnkkfPbq1xyMNaX5A,2181
-coiled-1.118.4.dev6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-coiled-1.118.4.dev6.dist-info/entry_points.txt,sha256=C8dz1ST_bTlTO-kNvuHBJQma9PyJPotg0S4xpPt5aHY,47
-coiled-1.118.4.dev6.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
-coiled-1.118.4.dev6.dist-info/RECORD,,
+coiled-1.129.3.dev10.dist-info/METADATA,sha256=aK3dJPlFqnpRc81jy1Q2T9VFDJd4NOGcQm3UtPhItQg,2182
+coiled-1.129.3.dev10.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+coiled-1.129.3.dev10.dist-info/entry_points.txt,sha256=C8dz1ST_bTlTO-kNvuHBJQma9PyJPotg0S4xpPt5aHY,47
+coiled-1.129.3.dev10.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
+coiled-1.129.3.dev10.dist-info/RECORD,,
coiled-1.118.4.dev6.dist-info/WHEEL → coiled-1.129.3.dev10.dist-info/WHEEL RENAMED
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.27.0
+Generator: hatchling 1.28.0
 Root-Is-Purelib: true
 Tag: py3-none-any