skypilot-nightly 1.0.0.dev20250212__py3-none-any.whl → 1.0.0.dev20250214__py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
sky/__init__.py CHANGED
@@ -5,7 +5,7 @@ from typing import Optional
  import urllib.request

  # Replaced with the current commit when building the wheels.
- _SKYPILOT_COMMIT_SHA = '1fe3fab0e7a3242f32039d55b456603350dc4196'
+ _SKYPILOT_COMMIT_SHA = '7170a91ca2605e15290689b675bc7640999f4aa1'


  def _get_git_commit():
@@ -35,7 +35,7 @@ def _get_git_commit():


  __commit__ = _get_git_commit()
- __version__ = '1.0.0.dev20250212'
+ __version__ = '1.0.0.dev20250214'
  __root_dir__ = os.path.dirname(os.path.abspath(__file__))


sky/clouds/kubernetes.py CHANGED
@@ -464,7 +464,9 @@ class Kubernetes(clouds.Cloud):
  # CPU resources on the node instead within the pod.
  custom_ray_options = {
  'object-store-memory': 500000000,
- 'num-cpus': str(int(cpus)),
+ # 'num-cpus' must be an integer, but we should not set it to 0 if
+ # cpus is <1.
+ 'num-cpus': str(max(int(cpus), 1)),
  }
  deploy_vars = {
  'instance_type': resources.instance_type,
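
Note: for context on the kubernetes.py change above, Kubernetes pods commonly request fractional CPUs, and truncating such a value with int() yields 0, which would tell Ray the node has no CPUs to schedule on. A minimal sketch of the before/after behavior (illustrative only, not code from the package):

    # Hypothetical fractional CPU request, e.g. cpus=0.5 on Kubernetes.
    cpus = 0.5
    old_value = str(int(cpus))          # '0'  -> Ray would see zero CPUs
    new_value = str(max(int(cpus), 1))  # '1'  -> clamped to at least one CPU
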
sky/clouds/runpod.py CHANGED
@@ -177,6 +177,11 @@ class RunPod(clouds.Cloud):
  hourly_cost = self.instance_type_to_hourly_cost(
  instance_type=instance_type, use_spot=use_spot)

+ # default to root
+ docker_username_for_runpod = (resources.docker_username_for_runpod
+ if resources.docker_username_for_runpod
+ is not None else 'root')
+
  return {
  'instance_type': instance_type,
  'custom_resources': custom_resources,
@@ -184,6 +189,7 @@ class RunPod(clouds.Cloud):
  'image_id': image_id,
  'use_spot': use_spot,
  'bid_per_gpu': str(hourly_cost),
+ 'docker_username_for_runpod': docker_username_for_runpod,
  }

  def _get_feasible_launchable_resources(
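
Note: the new 'docker_username_for_runpod' deploy variable falls back to 'root' when no username is given, so clusters that never set it keep the previous SSH user. A standalone sketch of that fallback (hypothetical helper name, not the package's API):

    from typing import Optional

    def resolve_runpod_ssh_user(docker_username: Optional[str]) -> str:
        # Mirrors the fallback above: an explicit username wins, otherwise
        # SSH into the RunPod container as root.
        return docker_username if docker_username is not None else 'root'

    assert resolve_runpod_ssh_user(None) == 'root'
    assert resolve_runpod_ssh_user('ubuntu') == 'ubuntu'
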
@@ -28,6 +28,7 @@ REGIONS = [
  'asia-northeast-2',
  'us-east-1',
  'us-east-2',
+ 'us-east-3',
  'us-west-2',
  'us-west-1',
  'us-south-1',
sky/data/storage.py CHANGED
@@ -354,7 +354,8 @@ class AbstractStore:
  metadata.is_sky_managed),
  sync_on_reconstruction=override_args.get('sync_on_reconstruction',
  True),
- # backward compatibility
+ # Backward compatibility
+ # TODO: remove the hasattr check after v0.11.0
  _bucket_sub_path=override_args.get(
  '_bucket_sub_path',
  metadata._bucket_sub_path # pylint: disable=protected-access
@@ -1462,6 +1463,8 @@ class S3Store(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')

  def get_file_sync_command(base_dir_path, file_names):
  includes = ' '.join([
@@ -1469,8 +1472,6 @@ class S3Store(AbstractStore):
  for file_name in file_names
  ])
  base_dir_path = shlex.quote(base_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = ('aws s3 sync --no-follow-symlinks --exclude="*" '
  f'{includes} {base_dir_path} '
  f's3://{self.name}{sub_path}')
@@ -1485,8 +1486,6 @@ class S3Store(AbstractStore):
  for file_name in excluded_list
  ])
  src_dir_path = shlex.quote(src_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (f'aws s3 sync --no-follow-symlinks {excludes} '
  f'{src_dir_path} '
  f's3://{self.name}{sub_path}/{dest_dir_name}')
@@ -1500,7 +1499,7 @@ class S3Store(AbstractStore):

  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> s3://{self.name}/'
+ sync_path = f'{source_message} -> s3://{self.name}{sub_path}/'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -1959,11 +1958,13 @@ class GcsStore(AbstractStore):
  copy_list = '\n'.join(
  os.path.abspath(os.path.expanduser(p)) for p in source_path_list)
  gsutil_alias, alias_gen = data_utils.get_gsutil_command()
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')
  sync_command = (f'{alias_gen}; echo "{copy_list}" | {gsutil_alias} '
- f'cp -e -n -r -I gs://{self.name}')
+ f'cp -e -n -r -I gs://{self.name}{sub_path}')
  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> gs://{self.name}/'
+ sync_path = f'{source_message} -> gs://{self.name}{sub_path}/'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -1995,13 +1996,13 @@ class GcsStore(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')

  def get_file_sync_command(base_dir_path, file_names):
  sync_format = '|'.join(file_names)
  gsutil_alias, alias_gen = data_utils.get_gsutil_command()
  base_dir_path = shlex.quote(base_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (f'{alias_gen}; {gsutil_alias} '
  f'rsync -e -x \'^(?!{sync_format}$).*\' '
  f'{base_dir_path} gs://{self.name}{sub_path}')
@@ -2014,8 +2015,6 @@ class GcsStore(AbstractStore):
  excludes = '|'.join(excluded_list)
  gsutil_alias, alias_gen = data_utils.get_gsutil_command()
  src_dir_path = shlex.quote(src_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (f'{alias_gen}; {gsutil_alias} '
  f'rsync -e -r -x \'({excludes})\' {src_dir_path} '
  f'gs://{self.name}{sub_path}/{dest_dir_name}')
@@ -2029,7 +2028,7 @@ class GcsStore(AbstractStore):

  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> gs://{self.name}/'
+ sync_path = f'{source_message} -> gs://{self.name}{sub_path}/'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -2307,15 +2306,24 @@ class AzureBlobStore(AbstractStore):
  An instance of AzureBlobStore.
  """
  assert isinstance(metadata, AzureBlobStore.AzureBlobStoreMetadata)
- return cls(name=override_args.get('name', metadata.name),
- storage_account_name=override_args.get(
- 'storage_account', metadata.storage_account_name),
- source=override_args.get('source', metadata.source),
- region=override_args.get('region', metadata.region),
- is_sky_managed=override_args.get('is_sky_managed',
- metadata.is_sky_managed),
- sync_on_reconstruction=override_args.get(
- 'sync_on_reconstruction', True))
+ # TODO: this needs to be kept in sync with the abstract
+ # AbstractStore.from_metadata.
+ return cls(
+ name=override_args.get('name', metadata.name),
+ storage_account_name=override_args.get(
+ 'storage_account', metadata.storage_account_name),
+ source=override_args.get('source', metadata.source),
+ region=override_args.get('region', metadata.region),
+ is_sky_managed=override_args.get('is_sky_managed',
+ metadata.is_sky_managed),
+ sync_on_reconstruction=override_args.get('sync_on_reconstruction',
+ True),
+ # Backward compatibility
+ # TODO: remove the hasattr check after v0.11.0
+ _bucket_sub_path=override_args.get(
+ '_bucket_sub_path',
+ metadata._bucket_sub_path # pylint: disable=protected-access
+ ) if hasattr(metadata, '_bucket_sub_path') else None)

  def get_metadata(self) -> AzureBlobStoreMetadata:
  return self.AzureBlobStoreMetadata(
@@ -2795,6 +2803,8 @@ class AzureBlobStore(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ container_path = (f'{self.container_name}/{self._bucket_sub_path}'
+ if self._bucket_sub_path else self.container_name)

  def get_file_sync_command(base_dir_path, file_names) -> str:
  # shlex.quote is not used for file_names as 'az storage blob sync'
@@ -2803,8 +2813,6 @@ class AzureBlobStore(AbstractStore):
  includes_list = ';'.join(file_names)
  includes = f'--include-pattern "{includes_list}"'
  base_dir_path = shlex.quote(base_dir_path)
- container_path = (f'{self.container_name}/{self._bucket_sub_path}'
- if self._bucket_sub_path else self.container_name)
  sync_command = (f'az storage blob sync '
  f'--account-name {self.storage_account_name} '
  f'--account-key {self.storage_account_key} '
@@ -2822,18 +2830,17 @@ class AzureBlobStore(AbstractStore):
  [file_name.rstrip('*') for file_name in excluded_list])
  excludes = f'--exclude-path "{excludes_list}"'
  src_dir_path = shlex.quote(src_dir_path)
- container_path = (f'{self.container_name}/{self._bucket_sub_path}'
- if self._bucket_sub_path else
- f'{self.container_name}')
  if dest_dir_name:
- container_path = f'{container_path}/{dest_dir_name}'
+ dest_dir_name = f'/{dest_dir_name}'
+ else:
+ dest_dir_name = ''
  sync_command = (f'az storage blob sync '
  f'--account-name {self.storage_account_name} '
  f'--account-key {self.storage_account_key} '
  f'{excludes} '
  '--delete-destination false '
  f'--source {src_dir_path} '
- f'--container {container_path}')
+ f'--container {container_path}{dest_dir_name}')
  return sync_command

  # Generate message for upload
@@ -2844,7 +2851,7 @@ class AzureBlobStore(AbstractStore):
  source_message = source_path_list[0]
  container_endpoint = data_utils.AZURE_CONTAINER_URL.format(
  storage_account_name=self.storage_account_name,
- container_name=self.name)
+ container_name=container_path)
  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
  sync_path = f'{source_message} -> {container_endpoint}/'
@@ -3238,6 +3245,8 @@ class R2Store(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')

  def get_file_sync_command(base_dir_path, file_names):
  includes = ' '.join([
@@ -3246,8 +3255,6 @@ class R2Store(AbstractStore):
  ])
  endpoint_url = cloudflare.create_endpoint()
  base_dir_path = shlex.quote(base_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = ('AWS_SHARED_CREDENTIALS_FILE='
  f'{cloudflare.R2_CREDENTIALS_PATH} '
  'aws s3 sync --no-follow-symlinks --exclude="*" '
@@ -3267,8 +3274,6 @@ class R2Store(AbstractStore):
  ])
  endpoint_url = cloudflare.create_endpoint()
  src_dir_path = shlex.quote(src_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = ('AWS_SHARED_CREDENTIALS_FILE='
  f'{cloudflare.R2_CREDENTIALS_PATH} '
  f'aws s3 sync --no-follow-symlinks {excludes} '
@@ -3286,7 +3291,7 @@ class R2Store(AbstractStore):

  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> r2://{self.name}/'
+ sync_path = f'{source_message} -> r2://{self.name}{sub_path}/'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -3710,6 +3715,8 @@ class IBMCosStore(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')

  def get_dir_sync_command(src_dir_path, dest_dir_name) -> str:
  """returns an rclone command that copies a complete folder
@@ -3731,8 +3738,6 @@ class IBMCosStore(AbstractStore):
  # .git directory is excluded from the sync
  # wrapping src_dir_path with "" to support path with spaces
  src_dir_path = shlex.quote(src_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (
  'rclone copy --exclude ".git/*" '
  f'{src_dir_path} '
@@ -3763,8 +3768,6 @@ class IBMCosStore(AbstractStore):
  for file_name in file_names
  ])
  base_dir_path = shlex.quote(base_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (
  'rclone copy '
  f'{includes} {base_dir_path} '
@@ -3779,7 +3782,8 @@ class IBMCosStore(AbstractStore):

  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> cos://{self.region}/{self.name}/'
+ sync_path = (
+ f'{source_message} -> cos://{self.region}/{self.name}{sub_path}/')
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -4178,15 +4182,21 @@ class OciStore(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'{self._bucket_sub_path}/'
+ if self._bucket_sub_path else '')

  @oci.with_oci_env
  def get_file_sync_command(base_dir_path, file_names):
  includes = ' '.join(
  [f'--include "{file_name}"' for file_name in file_names])
+ prefix_arg = ''
+ if sub_path:
+ prefix_arg = f'--object-prefix "{sub_path.strip("/")}"'
  sync_command = (
  'oci os object bulk-upload --no-follow-symlinks --overwrite '
  f'--bucket-name {self.name} --namespace-name {self.namespace} '
  f'--region {self.region} --src-dir "{base_dir_path}" '
+ f'{prefix_arg} '
  f'{includes}')

  return sync_command
@@ -4207,7 +4217,8 @@ class OciStore(AbstractStore):
  sync_command = (
  'oci os object bulk-upload --no-follow-symlinks --overwrite '
  f'--bucket-name {self.name} --namespace-name {self.namespace} '
- f'--region {self.region} --object-prefix "{dest_dir_name}" '
+ f'--region {self.region} '
+ f'--object-prefix "{sub_path}{dest_dir_name}" '
  f'--src-dir "{src_dir_path}" {excludes}')

  return sync_command
@@ -4220,7 +4231,7 @@ class OciStore(AbstractStore):

  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> oci://{self.name}/'
+ sync_path = f'{source_message} -> oci://{self.name}/{sub_path}'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
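
Note: the storage.py hunks above are one refactor applied across S3Store, GcsStore, AzureBlobStore, R2Store, IBMCosStore, and OciStore: the optional bucket sub-path is computed once per sync call rather than inside each nested helper, and the "Syncing ..." status message now shows the same sub-path as the generated sync command. A standalone sketch of the pattern (illustrative names, not the package's API):

    from typing import Optional

    def build_s3_destination(bucket: str, bucket_sub_path: Optional[str]) -> str:
        # Hoist the sub-path once so the sync command and the user-facing
        # log message agree on the destination.
        sub_path = f'/{bucket_sub_path}' if bucket_sub_path else ''
        return f's3://{bucket}{sub_path}'

    assert build_s3_destination('my-bucket', None) == 's3://my-bucket'
    assert build_s3_destination('my-bucket', 'team/run1') == 's3://my-bucket/team/run1'
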
@@ -450,6 +450,13 @@ def _post_provision_setup(
  logger.info(f'{indent_str}{colorama.Style.DIM}{vm_str}{plural} {verb} '
  f'up.{colorama.Style.RESET_ALL}')

+ # It's promised by the cluster config that docker_config does not
+ # exist for docker-native clouds, i.e. they provide docker containers
+ # instead of full VMs, like Kubernetes and RunPod, as it requires some
+ # special handlings to run docker inside their docker virtualization.
+ # For their Docker image settings, we do them when provisioning the
+ # cluster. See provision/{cloud}/instance.py:get_cluster_info for more
+ # details.
  if docker_config:
  status.update(
  ux_utils.spinner_message(
@@ -186,7 +186,7 @@ def delete_pod_template(template_name: str) -> None:
  runpod.runpod.api.graphql.run_graphql_query(
  f'mutation {{deleteTemplate(templateName: "{template_name}")}}')
  except runpod.runpod.error.QueryError as e:
- logger.warning(f'Failed to delete template {template_name}: {e}'
+ logger.warning(f'Failed to delete template {template_name}: {e} '
  'Please delete it manually.')


@@ -195,8 +195,9 @@ def delete_register_auth(registry_auth_id: str) -> None:
  try:
  runpod.runpod.delete_container_registry_auth(registry_auth_id)
  except runpod.runpod.error.QueryError as e:
- logger.warning(f'Failed to delete registry auth {registry_auth_id}: {e}'
- 'Please delete it manually.')
+ logger.warning(
+ f'Failed to delete registry auth {registry_auth_id}: {e} '
+ 'Please delete it manually.')


  def _create_template_for_docker_login(
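
Note: the two logger.warning() changes above only add a missing space. Python joins adjacent string literals with no separator, so the old messages ran the exception text straight into "Please delete it manually.". A quick illustration (not package code):

    err = 'unauthorized'
    old_msg = (f'Failed to delete template tpl-1: {err}'
               'Please delete it manually.')
    new_msg = (f'Failed to delete template tpl-1: {err} '
               'Please delete it manually.')
    assert old_msg == 'Failed to delete template tpl-1: unauthorizedPlease delete it manually.'
    assert new_msg == 'Failed to delete template tpl-1: unauthorized Please delete it manually.'
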
sky/resources.py CHANGED
@@ -67,6 +67,7 @@ class Resources:
  # Internal use only.
  # pylint: disable=invalid-name
  _docker_login_config: Optional[docker_utils.DockerLoginConfig] = None,
+ _docker_username_for_runpod: Optional[str] = None,
  _is_image_managed: Optional[bool] = None,
  _requires_fuse: Optional[bool] = None,
  _cluster_config_overrides: Optional[Dict[str, Any]] = None,
@@ -148,6 +149,9 @@ class Resources:
  _docker_login_config: the docker configuration to use. This includes
  the docker username, password, and registry server. If None, skip
  docker login.
+ _docker_username_for_runpod: the login username for the docker
+ containers. This is used by RunPod to set the ssh user for the
+ docker containers.
  _requires_fuse: whether the task requires FUSE mounting support. This
  is used internally by certain cloud implementations to do additional
  setup for FUSE mounting. This flag also safeguards against using
@@ -234,6 +238,12 @@ class Resources:

  self._docker_login_config = _docker_login_config

+ # TODO(andyl): This ctor param seems to be unused.
+ # We always use `Task.set_resources` and `Resources.copy` to set the
+ # `docker_username_for_runpod`. But to keep the consistency with
+ # `_docker_login_config`, we keep it here.
+ self._docker_username_for_runpod = _docker_username_for_runpod
+
  self._requires_fuse = _requires_fuse

  self._cluster_config_overrides = _cluster_config_overrides
@@ -479,6 +489,10 @@ class Resources:
  def requires_fuse(self, value: Optional[bool]) -> None:
  self._requires_fuse = value

+ @property
+ def docker_username_for_runpod(self) -> Optional[str]:
+ return self._docker_username_for_runpod
+
  def _set_cpus(
  self,
  cpus: Union[None, int, float, str],
@@ -1065,6 +1079,10 @@ class Resources:
  cloud_specific_variables = self.cloud.make_deploy_resources_variables(
  self, cluster_name, region, zones, num_nodes, dryrun)

+ # TODO(andyl): Should we print some warnings if users' envs share
+ # same names with the cloud specific variables, but not enabled
+ # since it's not on the particular cloud?
+
  # Docker run options
  docker_run_options = skypilot_config.get_nested(
  ('docker', 'run_options'),
@@ -1277,6 +1295,9 @@ class Resources:
  labels=override.pop('labels', self.labels),
  _docker_login_config=override.pop('_docker_login_config',
  self._docker_login_config),
+ _docker_username_for_runpod=override.pop(
+ '_docker_username_for_runpod',
+ self._docker_username_for_runpod),
  _is_image_managed=override.pop('_is_image_managed',
  self._is_image_managed),
  _requires_fuse=override.pop('_requires_fuse', self._requires_fuse),
@@ -1438,6 +1459,8 @@ class Resources:
  resources_fields['labels'] = config.pop('labels', None)
  resources_fields['_docker_login_config'] = config.pop(
  '_docker_login_config', None)
+ resources_fields['_docker_username_for_runpod'] = config.pop(
+ '_docker_username_for_runpod', None)
  resources_fields['_is_image_managed'] = config.pop(
  '_is_image_managed', None)
  resources_fields['_requires_fuse'] = config.pop('_requires_fuse', None)
@@ -1486,6 +1509,9 @@ class Resources:
  if self._docker_login_config is not None:
  config['_docker_login_config'] = dataclasses.asdict(
  self._docker_login_config)
+ if self._docker_username_for_runpod is not None:
+ config['_docker_username_for_runpod'] = (
+ self._docker_username_for_runpod)
  add_if_not_none('_cluster_config_overrides',
  self._cluster_config_overrides)
  if self._is_image_managed is not None:
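
Note: the resources.py hunks thread the new private field through every place a Resources object is rebuilt or serialized: the constructor, copy(), from_yaml_config(), and to_yaml_config(). A minimal sketch of that round-trip pattern for one optional private field (hypothetical class, not the package's API):

    from typing import Any, Dict, Optional

    class MiniResources:
        def __init__(self, _docker_username_for_runpod: Optional[str] = None):
            self._docker_username_for_runpod = _docker_username_for_runpod

        def copy(self, **override) -> 'MiniResources':
            # pop() keeps the current value unless the caller overrides it.
            return MiniResources(
                _docker_username_for_runpod=override.pop(
                    '_docker_username_for_runpod',
                    self._docker_username_for_runpod))

        def to_config(self) -> Dict[str, Any]:
            config: Dict[str, Any] = {}
            # Only serialize the field when it is set, mirroring the diff.
            if self._docker_username_for_runpod is not None:
                config['_docker_username_for_runpod'] = (
                    self._docker_username_for_runpod)
            return config

    r = MiniResources().copy(_docker_username_for_runpod='ubuntu')
    assert r.to_config() == {'_docker_username_for_runpod': 'ubuntu'}
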
sky/skylet/constants.py CHANGED
@@ -110,6 +110,8 @@ DOCKER_LOGIN_ENV_VARS = {
  DOCKER_SERVER_ENV_VAR,
  }

+ RUNPOD_DOCKER_USERNAME_ENV_VAR = 'SKYPILOT_RUNPOD_DOCKER_USERNAME'
+
  # Commands for disable GPU ECC, which can improve the performance of the GPU
  # for some workloads by 30%. This will only be applied when a user specify
  # `nvidia_gpus.disable_ecc: true` in ~/.sky/config.yaml.
sky/task.py CHANGED
@@ -121,6 +121,9 @@ def _check_docker_login_config(task_envs: Dict[str, str]) -> bool:

  If any of the docker login env vars is set, all of them must be set.

+ Returns:
+ True if there is a valid docker login config in task_envs.
+ False otherwise.
  Raises:
  ValueError: if any of the docker login env vars is set, but not all of
  them are set.
@@ -168,6 +171,23 @@ def _with_docker_login_config(
  return type(resources)(new_resources)


+ def _with_docker_username_for_runpod(
+ resources: Union[Set['resources_lib.Resources'],
+ List['resources_lib.Resources']],
+ task_envs: Dict[str, str],
+ ) -> Union[Set['resources_lib.Resources'], List['resources_lib.Resources']]:
+ docker_username_for_runpod = task_envs.get(
+ constants.RUNPOD_DOCKER_USERNAME_ENV_VAR)
+
+ # We should not call r.copy() if docker_username_for_runpod is None,
+ # to prevent `DummyResources` instance becoming a `Resources` instance.
+ if docker_username_for_runpod is None:
+ return resources
+ return (type(resources)(
+ r.copy(_docker_username_for_runpod=docker_username_for_runpod)
+ for r in resources))
+
+
  class Task:
  """Task: a computation to be run on the cloud."""

@@ -582,6 +602,8 @@ class Task:
  if _check_docker_login_config(self._envs):
  self.resources = _with_docker_login_config(self.resources,
  self._envs)
+ self.resources = _with_docker_username_for_runpod(
+ self.resources, self._envs)
  return self

  @property
@@ -647,6 +669,9 @@ class Task:
  resources = {resources}
  # TODO(woosuk): Check if the resources are None.
  self.resources = _with_docker_login_config(resources, self.envs)
+ # Only have effect on RunPod.
+ self.resources = _with_docker_username_for_runpod(
+ self.resources, self.envs)

  # Evaluate if the task requires FUSE and set the requires_fuse flag
  for _, storage_obj in self.storage_mounts.items():
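
Note: taken together with the constants.py, resources.py, runpod.py, and runpod-ray.yml.j2 changes, the task.py hooks above mean that setting SKYPILOT_RUNPOD_DOCKER_USERNAME as a task env propagates into each Resources object and ends up as the container's ssh_user on RunPod. A hedged usage sketch with the Python API (the env var name comes from this diff; the rest is ordinary SkyPilot usage and may need adjusting):

    import sky
    from sky import clouds

    task = sky.Task(run='echo hello')
    task.set_resources(sky.Resources(cloud=clouds.RunPod()))
    # Both set_resources() and update_envs() apply
    # _with_docker_username_for_runpod(), so each Resources object is copied
    # with _docker_username_for_runpod set; on other clouds it has no effect.
    task.update_envs({'SKYPILOT_RUNPOD_DOCKER_USERNAME': 'ubuntu'})
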
@@ -25,7 +25,7 @@ provider:
  {%- endif %}

  auth:
- ssh_user: root
+ ssh_user: {{docker_username_for_runpod}}
  ssh_private_key: {{ssh_private_key}}

  available_node_types:
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: skypilot-nightly
- Version: 1.0.0.dev20250212
+ Version: 1.0.0.dev20250214
  Summary: SkyPilot: An intercloud broker for the clouds
  Author: SkyPilot Team
  License: Apache 2.0
@@ -184,7 +184,9 @@ Dynamic: summary

  ----
  :fire: *News* :fire:
- - [Jan 2025] Launch and Serve **[DeepSeek-R1](https://github.com/deepseek-ai/DeepSeek-R1)** and **[Janus](https://github.com/deepseek-ai/DeepSeek-Janus)** on Kubernetes or Any Cloud: [**R1 example**](./llm/deepseek-r1/) and [**Janus example**](./llm/deepseek-janus/)
+ - [Feb 2025] Run and Serve DeepSeek-R1 671B using SkyPilot and SGLang with high throughput: [**example**](./llm/deepseek-r1/)
+ - [Jan 2025] Prepare and Serve Large-Scale Image Search with **Vector Database**: [**blog post**](https://blog.skypilot.co/large-scale-vector-database/) [**example**](./examples/vector_database/)
+ - [Jan 2025] Launch and Serve distilled models from **[DeepSeek-R1](https://github.com/deepseek-ai/DeepSeek-R1)** and **[Janus](https://github.com/deepseek-ai/DeepSeek-Janus)** on Kubernetes or Any Cloud: [**R1 example**](./llm/deepseek-r1-distilled/) and [**Janus example**](./llm/deepseek-janus/)
  - [Oct 2024] :tada: **SkyPilot crossed 1M+ downloads** :tada:: Thank you to our community! [**Twitter/X**](https://x.com/skypilot_org/status/1844770841718067638)
  - [Sep 2024] Point, Launch and Serve **Llama 3.2** on Kubernetes or Any Cloud: [**example**](./llm/llama-3_2/)
  - [Sep 2024] Run and deploy [**Pixtral**](./llm/pixtral), the first open-source multimodal model from Mistral AI.
@@ -342,7 +344,7 @@ Runnable examples:
  - [LocalGPT](./llm/localgpt)
  - [Falcon](./llm/falcon)
  - Add yours here & see more in [`llm/`](./llm)!
- - Framework examples: [PyTorch DDP](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_torch.yaml), [DeepSpeed](./examples/deepspeed-multinode/sky.yaml), [JAX/Flax on TPU](https://github.com/skypilot-org/skypilot/blob/master/examples/tpu/tpuvm_mnist.yaml), [Stable Diffusion](https://github.com/skypilot-org/skypilot/tree/master/examples/stable_diffusion), [Detectron2](https://github.com/skypilot-org/skypilot/blob/master/examples/detectron2_docker.yaml), [Distributed](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_tf_app.py) [TensorFlow](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_app_storage.yaml), [Ray Train](examples/distributed_ray_train/ray_train.yaml), [NeMo](https://github.com/skypilot-org/skypilot/blob/master/examples/nemo/), [programmatic grid search](https://github.com/skypilot-org/skypilot/blob/master/examples/huggingface_glue_imdb_grid_search_app.py), [Docker](https://github.com/skypilot-org/skypilot/blob/master/examples/docker/echo_app.yaml), [Cog](https://github.com/skypilot-org/skypilot/blob/master/examples/cog/), [Unsloth](https://github.com/skypilot-org/skypilot/blob/master/examples/unsloth/unsloth.yaml), [Ollama](https://github.com/skypilot-org/skypilot/blob/master/llm/ollama), [llm.c](https://github.com/skypilot-org/skypilot/tree/master/llm/gpt-2), [Airflow](./examples/airflow/training_workflow) and [many more (`examples/`)](./examples).
+ - Framework examples: [Vector Database](./examples/vector_database/), [PyTorch DDP](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_torch.yaml), [DeepSpeed](./examples/deepspeed-multinode/sky.yaml), [JAX/Flax on TPU](https://github.com/skypilot-org/skypilot/blob/master/examples/tpu/tpuvm_mnist.yaml), [Stable Diffusion](https://github.com/skypilot-org/skypilot/tree/master/examples/stable_diffusion), [Detectron2](https://github.com/skypilot-org/skypilot/blob/master/examples/detectron2_docker.yaml), [Distributed](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_tf_app.py) [TensorFlow](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_app_storage.yaml), [Ray Train](examples/distributed_ray_train/ray_train.yaml), [NeMo](https://github.com/skypilot-org/skypilot/blob/master/examples/nemo/), [programmatic grid search](https://github.com/skypilot-org/skypilot/blob/master/examples/huggingface_glue_imdb_grid_search_app.py), [Docker](https://github.com/skypilot-org/skypilot/blob/master/examples/docker/echo_app.yaml), [Cog](https://github.com/skypilot-org/skypilot/blob/master/examples/cog/), [Unsloth](https://github.com/skypilot-org/skypilot/blob/master/examples/unsloth/unsloth.yaml), [Ollama](https://github.com/skypilot-org/skypilot/blob/master/llm/ollama), [llm.c](https://github.com/skypilot-org/skypilot/tree/master/llm/gpt-2), [Airflow](./examples/airflow/training_workflow) and [many more (`examples/`)](./examples).

  Case Studies and Integrations: [Community Spotlights](https://blog.skypilot.co/community/)

@@ -1,4 +1,4 @@
- sky/__init__.py,sha256=GRvhpT8lUvIyIPLyC2cqv9RiI6hZ0_iMY56YNDpjPbs,5560
+ sky/__init__.py,sha256=hnw0sI17qKlRS50e2Ux9neBg1F8HcuZMLdlV7S9ziIE,5560
  sky/admin_policy.py,sha256=hPo02f_A32gCqhUueF0QYy1fMSSKqRwYEg_9FxScN_s,3248
  sky/authentication.py,sha256=MNc9uHnvQ1EsEl8SsrYcYCGbxcnDbR6gaRCXVNd5RZE,22338
  sky/check.py,sha256=xzLlxUkBCrzpOho8lw65EvKLPl_b9lA2nteF5MSYbDQ,10885
@@ -10,11 +10,11 @@ sky/exceptions.py,sha256=SEhRubPlk-crkflPC5P_Z085iLrSd3UScYwc790QwYw,9378
  sky/execution.py,sha256=vNUE9Z8hCSQeil7h3kdote2r6nkbrGXSqqmK6ru594Q,28453
  sky/global_user_state.py,sha256=cTwltMCDIIBaapuGgARxFwpDJDCiKKyVW-PP_qtWuCA,30241
  sky/optimizer.py,sha256=H5cpKELOQmnFpox0QXMB4P7jGhJxzXog4Ht_TYJaGuA,59758
- sky/resources.py,sha256=D3jteQxKOUydoNm7VDl90p02dwP3RpbO3gqNcl4dpOI,70327
+ sky/resources.py,sha256=W7VO5nTizr-KIhOamOs7oSwmBGLjQZhQM6DoYbiAOsg,71648
  sky/sky_logging.py,sha256=7Zk9mL1TDxFkGsy3INMBKYlqsbognVGSMzAsHZdZlhw,5891
  sky/skypilot_config.py,sha256=FN93hSG-heQCHBnemlIK2TwrJngKbpx4vMXNUzPIzV8,9087
  sky/status_lib.py,sha256=J7Jb4_Dz0v2T64ttOdyUgpokvl4S0sBJrMfH7Fvo51A,1457
- sky/task.py,sha256=zri5_Ghh5-fjDf2rl997ZmL4IlXetW9u9XXJIRUJ3Qg,51353
+ sky/task.py,sha256=Z74bBkOx1bFmGMoQRD3qbYxIZ5qi2AC5htY1KIsmvT0,52394
  sky/adaptors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  sky/adaptors/aws.py,sha256=FNNC8B-iqSSiCLPiWGK_PLm1R-Kt4yI5JPIpdE0QJxQ,7565
  sky/adaptors/azure.py,sha256=yjM8nAPW-mlSXfmA8OmJNnSIrZ9lQx2-GxiI-TIVrwE,21910
@@ -51,11 +51,11 @@ sky/clouds/do.py,sha256=od4gMTrs2W5IkaDDr_oexOSdIOqn94vKq2U_QZcrpRk,11311
  sky/clouds/fluidstack.py,sha256=u2I6jXEtTqgqRWi2EafMsKqc8VkUq1cR6CSDUvk72_U,12407
  sky/clouds/gcp.py,sha256=6QOnefFsYiLCcnajjduLHsayqJ641bBu42jPTpvy7Mc,55007
  sky/clouds/ibm.py,sha256=0ArRTQx1_DpTNGByFhukzFedEDzmVjBsGiiques1bQ0,21447
- sky/clouds/kubernetes.py,sha256=oZg4Lpn2ZBikyc5NTJIziUPEY0xs2mtz546ButhkZ7g,31541
+ sky/clouds/kubernetes.py,sha256=ocf8ZUlMbOcPb-n8JrSFix9rH75g089sC1JAd84JUXQ,31653
  sky/clouds/lambda_cloud.py,sha256=42AmcN2X_wdBMuAw606nR_pQCBAy5QFiAo711_WRqDE,12672
  sky/clouds/oci.py,sha256=VpPxpMJv52QePVdwdK9EuiMyqjp70dk8_rgUVv5Y-2w,27028
  sky/clouds/paperspace.py,sha256=F0Sj1RcqRb5fPjrr8qbdeY-JdfAHcRPc902pZOi4erw,10889
- sky/clouds/runpod.py,sha256=Wtaarp27_LTu5_E2agC7tTr2vhN1D4sblr2vZTT4vBI,11580
+ sky/clouds/runpod.py,sha256=4gZTbUO4I8bzio5x9Km42ZIujZzCx2eszdkN0Mz5fqE,11893
  sky/clouds/scp.py,sha256=JHyMqkAAqr9lJq79IVjj3rU1g-ZCCGLZTJEzIhYsw7c,15845
  sky/clouds/vast.py,sha256=vQV489qkZMfDtt_SnXParPY49gkgKx5LZAEOsk65kIo,11231
  sky/clouds/vsphere.py,sha256=rrNf6_uHy4ukjHwaN35XVh2-Xj9k43-QGQkiEXyHYJk,12273
@@ -85,7 +85,7 @@ sky/clouds/service_catalog/data_fetchers/fetch_azure.py,sha256=7YVnoGDGGZI2TK02b
  sky/clouds/service_catalog/data_fetchers/fetch_cudo.py,sha256=52P48lvWN0s1ArjeLPeLemPRpxjSRcHincRle0nqdm4,3440
  sky/clouds/service_catalog/data_fetchers/fetch_fluidstack.py,sha256=yKuAFbjBRNz_e2RNNDT_aHHAuKQ86Ac7GKgIie5O6Pg,7273
  sky/clouds/service_catalog/data_fetchers/fetch_gcp.py,sha256=HLxdxA9DMSi19mgpVM_cERV4o-xh_tJ9vmkGm1wOaIE,30868
- sky/clouds/service_catalog/data_fetchers/fetch_lambda_cloud.py,sha256=MN54h0CAGPHQAeF2eTmuESq3b0-d1kDARRUM6OkivCk,4962
+ sky/clouds/service_catalog/data_fetchers/fetch_lambda_cloud.py,sha256=Bi5ta91p4SkFCoaEJUPKPjDB0FZ24DMR4NcKFpKCkxU,4979
  sky/clouds/service_catalog/data_fetchers/fetch_vast.py,sha256=zR9icM3ty5C8tGw13pQbsBtQQMgG4kl1j_jSGqqrgOA,4741
  sky/clouds/service_catalog/data_fetchers/fetch_vsphere.py,sha256=Opp2r3KSzXPtwk3lKNbO8IX9QzjoRSwy1kW3jPjtS1c,21453
  sky/clouds/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -98,7 +98,7 @@ sky/data/__init__.py,sha256=Nhaf1NURisXpZuwWANa2IuCyppIuc720FRwqSE2oEwY,184
  sky/data/data_transfer.py,sha256=wixC4_3_JaeJFdGKOp-O5ulcsMugDSgrCR0SnPpugGc,8946
  sky/data/data_utils.py,sha256=HjcgMDuWRR_fNQ9gjuROi9GgPVvTGApiJwxGtdb2_UU,28860
  sky/data/mounting_utils.py,sha256=tJHBPEDP1Wg_r3oSGBwFhMDLnPCMPSFRz26O0QkDd0Y,14908
- sky/data/storage.py,sha256=CWVKnHhdzXw1biPbRqYizkyVexL_OCELuJCqtd4hit4,204094
+ sky/data/storage.py,sha256=vlipyOOxw43W6t5O9ccL53OYWAxPVMjF16qlfbZhdQo,204431
  sky/data/storage_utils.py,sha256=cM3kxlffYE7PnJySDu8huyUsMX_JYsf9uer8r5OYsjo,9556
  sky/jobs/__init__.py,sha256=ObZcz3lL1ip8JcmR6gbfZ4RMMfXJJdsnuU2zLQUb8jY,1546
  sky/jobs/constants.py,sha256=9kIdpwWNI9zWKQO39LTg9spUMGl5Iqx4ByIjRlic7Hw,1893
@@ -118,7 +118,7 @@ sky/provision/docker_utils.py,sha256=ENm0LkyrYWic3Ikyacho8X5uDMvGsbkZQsb6kNH1DuI
  sky/provision/instance_setup.py,sha256=YBFOwZQLBzpUjYoVQcX0KItej1rCBRWM23Dw9lg_q24,24386
  sky/provision/logging.py,sha256=yZWgejrFBhhRjAtvFu5N5bRXIMK5TuwNjp1vKQqz2pw,2103
  sky/provision/metadata_utils.py,sha256=LrxeV4wD2QPzNdXV_npj8q-pr35FatxBBjF_jSbpOT0,4013
- sky/provision/provisioner.py,sha256=ZOgFOO0NB4QZVPwd4qikRqi615Bq67n0Vcl3cTDVxNE,29153
+ sky/provision/provisioner.py,sha256=UuMO2wp98Nk1nue25TtIaMU79Aj14v9IoIAFO_CUy9w,29636
  sky/provision/aws/__init__.py,sha256=mxq8PeWJqUtalDozTNpbtENErRZ1ktEs8uf2aG9UUgU,731
  sky/provision/aws/config.py,sha256=-4mr5uxgsl_8eLm_4DfP8JurZGSysGuY0iDeBTHnX5Q,25943
  sky/provision/aws/instance.py,sha256=3-R8ohuN8ooNh2Fqqb7-c4vNFy1xsw2GQF4PHg3APhE,40843
@@ -172,7 +172,7 @@ sky/provision/paperspace/utils.py,sha256=uOmxbDKjV6skFizC4gYXSxRuEqso5ck2kF7MbtN
  sky/provision/runpod/__init__.py,sha256=6HYvHI27EaLrX1SS0vWVhdLu5HDBeZCdvAeDJuwM5pk,556
  sky/provision/runpod/config.py,sha256=9ulZJVL7nHuxhTdoj8D7lNn7SdicJ5zc6FIcHIG9tcg,321
  sky/provision/runpod/instance.py,sha256=FNalpTQMvnmACdtFsGvVPVhEkzdqrlmd_pExn33bIQ8,10358
- sky/provision/runpod/utils.py,sha256=2L4MUrFi4rQOP26IeAkPNQ9oWGPJhH5fvz7NAbL3nNQ,12421
+ sky/provision/runpod/utils.py,sha256=F3zsxPV3IY5C73J-zNZEfcXL8bsZBgNWzlmtSO6qOqI,12425
  sky/provision/runpod/api/__init__.py,sha256=eJwjPeQZ5B7chf4-Bl4YeI2Uo9aLX4M1rr2NmPk89_E,112
  sky/provision/runpod/api/commands.py,sha256=oh77PS0H0wZudHV8II9ceRuaFQ8FN4NJ4S3-6_PeqPM,4238
  sky/provision/runpod/api/pods.py,sha256=GMwxgNr9NnHPfyh2Y9b8S_vLhrLY4h7LybFBBQNAyfw,4948
@@ -215,7 +215,7 @@ sky/skylet/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  sky/skylet/attempt_skylet.py,sha256=GZ6ITjjA0m-da3IxXXfoHR6n4pjp3X3TOXUqVvSrV0k,2136
  sky/skylet/autostop_lib.py,sha256=JPDHmByuhoNYXSUHl-OnyeJUkOFWn7gDM1FrS7Kr3E8,4478
  sky/skylet/configs.py,sha256=UtnpmEL0F9hH6PSjhsps7xgjGZ6qzPOfW1p2yj9tSng,1887
- sky/skylet/constants.py,sha256=EUSW4yH59eqBDLMIdmQWIYd3nAJBFoUeo5v9MGiginI,16057
+ sky/skylet/constants.py,sha256=bTvojyYHLhUxMI-xD8d6Fj6snQiK4-PLw-baRFKCcfQ,16125
  sky/skylet/events.py,sha256=__7bt6Z8q2W1vwTQv4yug-oAXDwSf8zBeRxb8HFM36U,12792
  sky/skylet/job_lib.py,sha256=Rk-C069cusJIRXsks8xqCb016JSt7GlpU7LrpX0qFJk,42785
  sky/skylet/log_lib.py,sha256=oFEBd85vDYFrIyyZKekH30yc4rRYILC0F0o-COQ64oE,20445
@@ -256,7 +256,7 @@ sky/templates/lambda-ray.yml.j2,sha256=HyvO_tX2vxwSsc4IFVSqGuIbjLMk0bevP9bcxb8ZQ
  sky/templates/local-ray.yml.j2,sha256=FNHeyHF6nW9nU9QLIZceUWfvrFTTcO51KqhTnYCEFaA,1185
  sky/templates/oci-ray.yml.j2,sha256=92dvXGaUd2Kwep9fgTjOsAPJiBLr8GQTjy7pFvuPAyE,4562
  sky/templates/paperspace-ray.yml.j2,sha256=HQjZNamrB_a4fOMCxQXSVdV5JIHtbGtAE0JzEO8uuVQ,4021
- sky/templates/runpod-ray.yml.j2,sha256=bUiF4Y_EkCA_GKLtTzPXbajdL-NOUiJ38Pe4dZd2dys,4284
+ sky/templates/runpod-ray.yml.j2,sha256=95392Jvk-PwrEHIF2C5i-EnaqC31nTq4UTYZuQxUe1k,4310
  sky/templates/scp-ray.yml.j2,sha256=I9u8Ax-lit-d6UrCC9BVU8avst8w1cwK6TrzZBcz_JM,5608
  sky/templates/sky-serve-controller.yaml.j2,sha256=W4i1-OGRU2WDvauLC4EDXcYrNxj7mzRFSvSqzAKfehc,2020
  sky/templates/vast-ray.yml.j2,sha256=KaZLBJfI6FzAVRVq0NNM0_SN0RQUrDIehnJJ_LnvwnY,2990
@@ -298,9 +298,9 @@ sky/utils/kubernetes/k8s_gpu_labeler_job.yaml,sha256=k0TBoQ4zgf79-sVkixKSGYFHQ7Z
  sky/utils/kubernetes/k8s_gpu_labeler_setup.yaml,sha256=VLKT2KKimZu1GDg_4AIlIt488oMQvhRZWwsj9vBbPUg,3812
  sky/utils/kubernetes/rsync_helper.sh,sha256=h4YwrPFf9727CACnMJvF3EyK_0OeOYKKt4su_daKekw,1256
  sky/utils/kubernetes/ssh_jump_lifecycle_manager.py,sha256=Kq1MDygF2IxFmu9FXpCxqucXLmeUrvs6OtRij6XTQbo,6554
- skypilot_nightly-1.0.0.dev20250212.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
- skypilot_nightly-1.0.0.dev20250212.dist-info/METADATA,sha256=rkJIHWHxQtacqsQPb5SZ7XHCGiXMvMBzXNPupXqi4sU,21397
- skypilot_nightly-1.0.0.dev20250212.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- skypilot_nightly-1.0.0.dev20250212.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
- skypilot_nightly-1.0.0.dev20250212.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
- skypilot_nightly-1.0.0.dev20250212.dist-info/RECORD,,
+ skypilot_nightly-1.0.0.dev20250214.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
+ skypilot_nightly-1.0.0.dev20250214.dist-info/METADATA,sha256=jxTYT5S0cFhCCQ6C77MoNQIW2yEJCxgX1Nz4LfPoUgI,21800
+ skypilot_nightly-1.0.0.dev20250214.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ skypilot_nightly-1.0.0.dev20250214.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
+ skypilot_nightly-1.0.0.dev20250214.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
+ skypilot_nightly-1.0.0.dev20250214.dist-info/RECORD,,