skypilot-nightly 1.0.0.dev20241125__py3-none-any.whl → 1.0.0.dev20241127__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sky/__init__.py CHANGED
@@ -5,7 +5,7 @@ from typing import Optional
5
5
  import urllib.request
6
6
 
7
7
  # Replaced with the current commit when building the wheels.
8
- _SKYPILOT_COMMIT_SHA = '894330bd5455e8b6739a5c22abdee2529f5fe282'
8
+ _SKYPILOT_COMMIT_SHA = '23f9821bd165c8d154a44c3505a18b1e28cc0f48'
9
9
 
10
10
 
11
11
  def _get_git_commit():
@@ -35,7 +35,7 @@ def _get_git_commit():
35
35
 
36
36
 
37
37
  __commit__ = _get_git_commit()
38
- __version__ = '1.0.0.dev20241125'
38
+ __version__ = '1.0.0.dev20241127'
39
39
  __root_dir__ = os.path.dirname(os.path.abspath(__file__))
40
40
 
41
41
 
sky/cli.py CHANGED
@@ -486,7 +486,7 @@ def _parse_override_params(
486
486
  image_id: Optional[str] = None,
487
487
  disk_size: Optional[int] = None,
488
488
  disk_tier: Optional[str] = None,
489
- ports: Optional[Tuple[str]] = None) -> Dict[str, Any]:
489
+ ports: Optional[Tuple[str, ...]] = None) -> Dict[str, Any]:
490
490
  """Parses the override parameters into a dictionary."""
491
491
  override_params: Dict[str, Any] = {}
492
492
  if cloud is not None:
@@ -539,7 +539,14 @@ def _parse_override_params(
539
539
  else:
540
540
  override_params['disk_tier'] = disk_tier
541
541
  if ports:
542
- override_params['ports'] = ports
542
+ if any(p.lower() == 'none' for p in ports):
543
+ if len(ports) > 1:
544
+ with ux_utils.print_exception_no_traceback():
545
+ raise ValueError('Cannot specify both "none" and other '
546
+ 'ports.')
547
+ override_params['ports'] = None
548
+ else:
549
+ override_params['ports'] = ports
543
550
  return override_params
544
551
 
545
552
 
@@ -730,7 +737,7 @@ def _make_task_or_dag_from_entrypoint_with_overrides(
730
737
  image_id: Optional[str] = None,
731
738
  disk_size: Optional[int] = None,
732
739
  disk_tier: Optional[str] = None,
733
- ports: Optional[Tuple[str]] = None,
740
+ ports: Optional[Tuple[str, ...]] = None,
734
741
  env: Optional[List[Tuple[str, str]]] = None,
735
742
  field_to_ignore: Optional[List[str]] = None,
736
743
  # job launch specific
@@ -1084,7 +1091,7 @@ def launch(
1084
1091
  env: List[Tuple[str, str]],
1085
1092
  disk_size: Optional[int],
1086
1093
  disk_tier: Optional[str],
1087
- ports: Tuple[str],
1094
+ ports: Tuple[str, ...],
1088
1095
  idle_minutes_to_autostop: Optional[int],
1089
1096
  down: bool, # pylint: disable=redefined-outer-name
1090
1097
  retry_until_up: bool,
@@ -324,9 +324,8 @@ def get_common_gpus() -> List[str]:
324
324
  'A100',
325
325
  'A100-80GB',
326
326
  'H100',
327
- 'K80',
328
327
  'L4',
329
- 'M60',
328
+ 'L40S',
330
329
  'P100',
331
330
  'T4',
332
331
  'V100',
@@ -337,13 +336,13 @@ def get_common_gpus() -> List[str]:
337
336
  def get_tpus() -> List[str]:
338
337
  """Returns a list of TPU names."""
339
338
  # TODO(wei-lin): refactor below hard-coded list.
340
- # There are many TPU configurations available, we show the three smallest
341
- # and the largest configuration for the latest gen TPUs.
339
+ # There are many TPU configurations available, we show some of the smallest
340
+ # ones for each generation, and people should find larger ones with
341
+ # sky show-gpus tpu.
342
342
  return [
343
- 'tpu-v2-512', 'tpu-v3-2048', 'tpu-v4-8', 'tpu-v4-16', 'tpu-v4-32',
344
- 'tpu-v4-3968', 'tpu-v5litepod-1', 'tpu-v5litepod-4', 'tpu-v5litepod-8',
345
- 'tpu-v5litepod-256', 'tpu-v5p-8', 'tpu-v5p-32', 'tpu-v5p-128',
346
- 'tpu-v5p-12288'
343
+ 'tpu-v2-8', 'tpu-v3-8', 'tpu-v4-8', 'tpu-v4-16', 'tpu-v4-32',
344
+ 'tpu-v5litepod-1', 'tpu-v5litepod-4', 'tpu-v5litepod-8', 'tpu-v5p-8',
345
+ 'tpu-v5p-16', 'tpu-v5p-32', 'tpu-v6e-1', 'tpu-v6e-4', 'tpu-v6e-8'
347
346
  ]
348
347
 
349
348
 
@@ -64,7 +64,7 @@ FAMILY_NAME_TO_SKYPILOT_GPU_NAME = {
64
64
  'standardNVSv2Family': 'M60',
65
65
  'standardNVSv3Family': 'M60',
66
66
  'standardNVPromoFamily': 'M60',
67
- 'standardNVSv4Family': 'Radeon MI25',
67
+ 'standardNVSv4Family': 'MI25',
68
68
  'standardNDSFamily': 'P40',
69
69
  'StandardNVADSA10v5Family': 'A10',
70
70
  'StandardNCadsH100v5Family': 'H100',
@@ -306,7 +306,7 @@ def _create_vm(
306
306
  identity=compute.VirtualMachineIdentity(
307
307
  type='UserAssigned',
308
308
  user_assigned_identities={provider_config['msi']: {}}),
309
- priority=node_config['azure_arm_parameters']['priority'])
309
+ priority=node_config['azure_arm_parameters'].get('priority', None))
310
310
  vm_poller = compute_client.virtual_machines.begin_create_or_update(
311
311
  resource_group_name=provider_config['resource_group'],
312
312
  vm_name=vm_name,
@@ -1045,11 +1045,15 @@ def get_kube_config_context_namespace(
1045
1045
  the default namespace.
1046
1046
  """
1047
1047
  k8s = kubernetes.kubernetes
1048
- # Get namespace if using in-cluster config
1049
1048
  ns_path = '/var/run/secrets/kubernetes.io/serviceaccount/namespace'
1050
- if os.path.exists(ns_path):
1051
- with open(ns_path, encoding='utf-8') as f:
1052
- return f.read().strip()
1049
+ # If using in-cluster context, get the namespace from the service account
1050
+ # namespace file. Uses the same logic as adaptors.kubernetes._load_config()
1051
+ # to stay consistent with in-cluster config loading.
1052
+ if (context_name == kubernetes.in_cluster_context_name() or
1053
+ context_name is None):
1054
+ if os.path.exists(ns_path):
1055
+ with open(ns_path, encoding='utf-8') as f:
1056
+ return f.read().strip()
1053
1057
  # If not in-cluster, get the namespace from kubeconfig
1054
1058
  try:
1055
1059
  contexts, current_context = k8s.config.list_kube_config_contexts()
@@ -1136,7 +1140,11 @@ class KubernetesInstanceType:
1136
1140
  name = (f'{common_utils.format_float(self.cpus)}CPU--'
1137
1141
  f'{common_utils.format_float(self.memory)}GB')
1138
1142
  if self.accelerator_count:
1139
- name += f'--{self.accelerator_count}{self.accelerator_type}'
1143
+ # Replace spaces with underscores in accelerator type to make it a
1144
+ # valid logical instance type name.
1145
+ assert self.accelerator_type is not None, self.accelerator_count
1146
+ acc_name = self.accelerator_type.replace(' ', '_')
1147
+ name += f'--{self.accelerator_count}{acc_name}'
1140
1148
  return name
1141
1149
 
1142
1150
  @staticmethod
@@ -1167,7 +1175,9 @@ class KubernetesInstanceType:
1167
1175
  accelerator_type = match.group('accelerator_type')
1168
1176
  if accelerator_count:
1169
1177
  accelerator_count = int(accelerator_count)
1170
- accelerator_type = str(accelerator_type)
1178
+ # This is to revert the accelerator types with spaces back to
1179
+ # the original format.
1180
+ accelerator_type = str(accelerator_type).replace('_', ' ')
1171
1181
  else:
1172
1182
  accelerator_count = None
1173
1183
  accelerator_type = None
@@ -173,6 +173,7 @@ setup_commands:
173
173
  # Line 'sudo grep ..': set the number of threads per process to unlimited to avoid ray job submit stucking issue when the number of running ray jobs increase.
174
174
  # Line 'mkdir -p ..': disable host key check
175
175
  # Line 'python3 -c ..': patch the buggy ray files and enable `-o allow_other` option for `goofys`
176
+ # Line 'rm ~/.aws/credentials': explicitly remove the credentials file to be safe. This is to guard against the case where the credentials file was uploaded once because `remote_identity` was not set in a previous launch.
176
177
  - mkdir -p ~/.ssh; touch ~/.ssh/config;
177
178
  {%- for initial_setup_command in initial_setup_commands %}
178
179
  {{ initial_setup_command }}
@@ -185,7 +186,12 @@ setup_commands:
185
186
  sudo grep -e '^DefaultTasksMax' /etc/systemd/system.conf || (sudo bash -c 'echo "DefaultTasksMax=infinity" >> /etc/systemd/system.conf'); sudo systemctl set-property user-$(id -u $(whoami)).slice TasksMax=infinity; sudo systemctl daemon-reload;
186
187
  {%- endif %}
187
188
  mkdir -p ~/.ssh; (grep -Pzo -q "Host \*\n StrictHostKeyChecking no" ~/.ssh/config) || printf "Host *\n StrictHostKeyChecking no\n" >> ~/.ssh/config;
188
- [ -f /etc/fuse.conf ] && sudo sed -i 's/#user_allow_other/user_allow_other/g' /etc/fuse.conf || (sudo sh -c 'echo "user_allow_other" > /etc/fuse.conf'); # This is needed for `-o allow_other` option for `goofys`;
189
+ [ -f /etc/fuse.conf ] && sudo sed -i 's/#user_allow_other/user_allow_other/g' /etc/fuse.conf || (sudo sh -c 'echo "user_allow_other" > /etc/fuse.conf');
190
+ {%- if remote_identity != 'LOCAL_CREDENTIALS' %}
191
+ rm ~/.aws/credentials || true;
192
+ {%- endif %}
193
+
194
+
189
195
 
190
196
  # Command to start ray clusters are now placed in `sky.provision.instance_setup`.
191
197
  # We do not need to list it here anymore.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: skypilot-nightly
3
- Version: 1.0.0.dev20241125
3
+ Version: 1.0.0.dev20241127
4
4
  Summary: SkyPilot: An intercloud broker for the clouds
5
5
  Author: SkyPilot Team
6
6
  License: Apache 2.0
@@ -1,8 +1,8 @@
1
- sky/__init__.py,sha256=MGfQ2l_7Ed92jVfreJmVbhoyb14CRXKGvq7w0z5gKPs,5882
1
+ sky/__init__.py,sha256=uN7QkrYaybENKNlIFrJ3VXkcoAb2TTfv_VFyNv2yHT4,5882
2
2
  sky/admin_policy.py,sha256=hPo02f_A32gCqhUueF0QYy1fMSSKqRwYEg_9FxScN_s,3248
3
3
  sky/authentication.py,sha256=kACHmiZgWgRpYd1wx1ofbXRMErfMcFmWrkw4a9NxYrY,20988
4
4
  sky/check.py,sha256=D3Y3saIFAYVvPxuBHnVgJEO0fUVDxgjwuMBaO-D778k,9472
5
- sky/cli.py,sha256=2QrlLeMwKpVKYOBDwtgs9zkBvNgn9Rg3XKk9aE6_0eY,213418
5
+ sky/cli.py,sha256=0sLOr7lBg2eKeFOgkW2ZS4RYb-hDccM78pVNVnXu_Gs,213764
6
6
  sky/cloud_stores.py,sha256=RjFgmRhUh1Kk__f6g3KxzLp9s7dA0pFK4W1AukEuUaw,21153
7
7
  sky/core.py,sha256=0-4W_DKJZgbwXuzNZKQ2R_qJxqxbqqNfyi0U0PQBKvQ,38230
8
8
  sky/dag.py,sha256=f3sJlkH4bE6Uuz3ozNtsMhcBpRx7KmC9Sa4seDKt4hU,3104
@@ -55,7 +55,7 @@ sky/clouds/paperspace.py,sha256=0UxOcv_NaLY5hrFoAA_ZienclZUOqzf0yxXXZu4jXG0,1089
55
55
  sky/clouds/runpod.py,sha256=UlHFPQY4wGGi0gLDO-vZoeJcgbQTCYXh4Pk8mKQBNUk,11515
56
56
  sky/clouds/scp.py,sha256=JHyMqkAAqr9lJq79IVjj3rU1g-ZCCGLZTJEzIhYsw7c,15845
57
57
  sky/clouds/vsphere.py,sha256=LzO-Mc-zDgpaDYZxNKGdEFa0eR5DHpTgKsPX60mPi10,12280
58
- sky/clouds/service_catalog/__init__.py,sha256=cFZ3HLdQVa42xOhK2XxuB_xPIX4X1UR89InR4y2y_78,14757
58
+ sky/clouds/service_catalog/__init__.py,sha256=p4V0GGeumT8yt01emqDM7Au45H5jvPfGNqdI6L2W3uM,14750
59
59
  sky/clouds/service_catalog/aws_catalog.py,sha256=j33lNC5GXWK6CiGWZORCnumGlRODmCAT2_lfWp0YtBc,13106
60
60
  sky/clouds/service_catalog/azure_catalog.py,sha256=5Q51x_WEKvQ2YSgJvZHRH3URlbwIstYuwpjaWW_wJlw,8149
61
61
  sky/clouds/service_catalog/common.py,sha256=qHNLzh59W34CSSCCztu75n69TuGyDQ310SQc_P-t544,27700
@@ -75,7 +75,7 @@ sky/clouds/service_catalog/vsphere_catalog.py,sha256=OV3Czi3vwRSW4lqVPHxU_GND0ox
75
75
  sky/clouds/service_catalog/data_fetchers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
76
76
  sky/clouds/service_catalog/data_fetchers/analyze.py,sha256=VdksJQs3asFE8H5T3ZV1FJas2xD9WEX6c-V5p7y-wp4,2084
77
77
  sky/clouds/service_catalog/data_fetchers/fetch_aws.py,sha256=ro2zazdkDF6z9bE7QFyjoeb4VFxmbNZ1WK5IQrdoQWk,23003
78
- sky/clouds/service_catalog/data_fetchers/fetch_azure.py,sha256=L1JsX1YrhpyI7ylzEPBBNE9XOZM0K0FIXbBUMj9h0MQ,12803
78
+ sky/clouds/service_catalog/data_fetchers/fetch_azure.py,sha256=K5jyfCAR5d-Hg78tDhmqpz0DQl79ndCW1ZRhBDLcBdM,12796
79
79
  sky/clouds/service_catalog/data_fetchers/fetch_cudo.py,sha256=52P48lvWN0s1ArjeLPeLemPRpxjSRcHincRle0nqdm4,3440
80
80
  sky/clouds/service_catalog/data_fetchers/fetch_fluidstack.py,sha256=yKuAFbjBRNz_e2RNNDT_aHHAuKQ86Ac7GKgIie5O6Pg,7273
81
81
  sky/clouds/service_catalog/data_fetchers/fetch_gcp.py,sha256=VrTTkMF5AjiplfDmvPBW-otR3oXGU3-oFouVMfIua4Q,33447
@@ -118,7 +118,7 @@ sky/provision/aws/utils.py,sha256=m49pS-SHGW7Au3bhDeTPsL8N5iRzbwOXzyEWRCc1Vho,32
118
118
  sky/provision/azure/__init__.py,sha256=87cgk1_Ws7n9rqaDDPv-HpfrkVeSQMdFQnhnXwyx9g4,548
119
119
  sky/provision/azure/azure-config-template.json,sha256=jrjAgOtpe0e6FSg3vsVqHKQqJe0w-HeWOFT1HuwzS2c,4712
120
120
  sky/provision/azure/config.py,sha256=V5-0Zelt4Xo0vcqnD6PpsnaCS7vc3xosDelILDAKSW4,8885
121
- sky/provision/azure/instance.py,sha256=7bbL5o8vgEeAA7iopPrd7Zh8pFs-SZpMzTeTmU4-zLU,49049
121
+ sky/provision/azure/instance.py,sha256=T9yzMGeYIqQVKkZ1paUWIkhRbPTrBCKmWjTFVixahcM,49059
122
122
  sky/provision/cudo/__init__.py,sha256=KAEl26MVPsk7IoP9Gg-MOJJRIV6-X9B0fbyHdyJWdLo,741
123
123
  sky/provision/cudo/config.py,sha256=RYOVkV0MoUqVBJRZiKhBZhjFygeyFs7eUdVMdPg1vds,327
124
124
  sky/provision/cudo/cudo_machine_type.py,sha256=_VNXWPELmlFXbtdcnPvkuLuyE9CZ923BUCdiac-ClDY,696
@@ -140,7 +140,7 @@ sky/provision/kubernetes/config.py,sha256=WEKcFXXhe89bLGAvoMiBvTDxdxkpTIA6ezrj2v
140
140
  sky/provision/kubernetes/instance.py,sha256=2zd_Z09amOsi0vPZjQYMJCkCWbN2YecMLL9HkmUuPrM,48414
141
141
  sky/provision/kubernetes/network.py,sha256=EpNjRQ131CXepqbdkoRKFu4szVrm0oKEpv1l8EgOkjU,12364
142
142
  sky/provision/kubernetes/network_utils.py,sha256=t1FS3K400fetH7cBuRgQJZl5_jEeMshsvsYmnMUcq8k,11399
143
- sky/provision/kubernetes/utils.py,sha256=UuicHqgqbpF937LGoc5tHLrweqPVtvrekDcpsaY_v_k,101557
143
+ sky/provision/kubernetes/utils.py,sha256=cnhmVcy8ri8iKMr404iugxBR2gQIXZiJVCxXwi3vglc,102225
144
144
  sky/provision/kubernetes/manifests/smarter-device-manager-configmap.yaml,sha256=AMzYzlY0JIlfBWj5eX054Rc1XDW2thUcLSOGMJVhIdA,229
145
145
  sky/provision/kubernetes/manifests/smarter-device-manager-daemonset.yaml,sha256=RtTq4F1QUmR2Uunb6zuuRaPhV7hpesz4saHjn3Ncsb4,2010
146
146
  sky/provision/lambda_cloud/__init__.py,sha256=6EEvSgtUeEiup9ivIFevHmgv0GqleroO2X0K7TRa2nE,612
@@ -218,7 +218,7 @@ sky/skylet/ray_patches/log_monitor.py.patch,sha256=CPoh3U_ogOHrkMOK7jaIRnwdzxjBT
218
218
  sky/skylet/ray_patches/resource_demand_scheduler.py.patch,sha256=AVV-Hw-Rxw16aFm4VsyzayX1QOvwmQuM79iVdSjkSl4,658
219
219
  sky/skylet/ray_patches/updater.py.patch,sha256=ZNMGVYICPBB44jLbEx2KvCgIY7BWYdDv3-2b2HJWmAQ,289
220
220
  sky/skylet/ray_patches/worker.py.patch,sha256=_OBhibdr3xOy5Qje6Tt8D1eQVm_msi50TJbCJmOTxVU,565
221
- sky/templates/aws-ray.yml.j2,sha256=fJUwkgXwkuackZI3UD7Fum4iJpkZttl6Jwy3MtYqL1I,8547
221
+ sky/templates/aws-ray.yml.j2,sha256=FK95sqSCrCkoAz9Cdtwq6_g3TqU-3048KE7pUaZfNNg,8814
222
222
  sky/templates/azure-ray.yml.j2,sha256=NQKg_f_S7WjsY90ykx0yNDNOGYnnEL3HS4pA3NMIZkM,6112
223
223
  sky/templates/cudo-ray.yml.j2,sha256=SEHVY57iBauCOE2HYJtYVFEKlriAkdwQu_p86a1n_bA,3548
224
224
  sky/templates/fluidstack-ray.yml.j2,sha256=t8TCULgiErCZdtFmBZVsA8ZdcqR7ccwsmQhuDFTBEAU,3541
@@ -275,9 +275,9 @@ sky/utils/kubernetes/k8s_gpu_labeler_job.yaml,sha256=k0TBoQ4zgf79-sVkixKSGYFHQ7Z
275
275
  sky/utils/kubernetes/k8s_gpu_labeler_setup.yaml,sha256=VLKT2KKimZu1GDg_4AIlIt488oMQvhRZWwsj9vBbPUg,3812
276
276
  sky/utils/kubernetes/rsync_helper.sh,sha256=h4YwrPFf9727CACnMJvF3EyK_0OeOYKKt4su_daKekw,1256
277
277
  sky/utils/kubernetes/ssh_jump_lifecycle_manager.py,sha256=RFLJ3k7MR5UN4SKHykQ0lV9SgXumoULpKYIAt1vh-HU,6560
278
- skypilot_nightly-1.0.0.dev20241125.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
279
- skypilot_nightly-1.0.0.dev20241125.dist-info/METADATA,sha256=V-eTZeyxhKUgaPzhNwlHoF39r1y2PEMx1dmytSCf3cw,20222
280
- skypilot_nightly-1.0.0.dev20241125.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
281
- skypilot_nightly-1.0.0.dev20241125.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
282
- skypilot_nightly-1.0.0.dev20241125.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
283
- skypilot_nightly-1.0.0.dev20241125.dist-info/RECORD,,
278
+ skypilot_nightly-1.0.0.dev20241127.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
279
+ skypilot_nightly-1.0.0.dev20241127.dist-info/METADATA,sha256=Hgq2Z8ubTWrSrRjki2Z10QPieQ5H6Tt1RUGJf5o-_h0,20222
280
+ skypilot_nightly-1.0.0.dev20241127.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
281
+ skypilot_nightly-1.0.0.dev20241127.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
282
+ skypilot_nightly-1.0.0.dev20241127.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
283
+ skypilot_nightly-1.0.0.dev20241127.dist-info/RECORD,,