skypilot-nightly 1.0.0.dev20250213__py3-none-any.whl → 1.0.0.dev20250214__py3-none-any.whl

This diff shows the changes between two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
sky/__init__.py CHANGED
@@ -5,7 +5,7 @@ from typing import Optional
 import urllib.request
 
 # Replaced with the current commit when building the wheels.
-_SKYPILOT_COMMIT_SHA = 'c49961417a83b049b3f3435a252c8ec5ea0fb5e6'
+_SKYPILOT_COMMIT_SHA = '7170a91ca2605e15290689b675bc7640999f4aa1'
 
 
 def _get_git_commit():
@@ -35,7 +35,7 @@ def _get_git_commit():
 
 
 __commit__ = _get_git_commit()
-__version__ = '1.0.0.dev20250213'
+__version__ = '1.0.0.dev20250214'
 __root_dir__ = os.path.dirname(os.path.abspath(__file__))
 
 
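The change above is mechanical: the nightly build stamps a fresh commit SHA and version into `sky/__init__.py`. As a rough illustration of the pattern the comment describes (the body of `_get_git_commit` is elided by this diff, so the sketch below is an assumption, not SkyPilot's actual implementation):

```python
# Hypothetical sketch: a module-level SHA baked in at wheel-build time,
# with a `git` fallback for editable/source installs.
import subprocess

# The build script rewrites this assignment with the release commit.
_SKYPILOT_COMMIT_SHA = '7170a91ca2605e15290689b675bc7640999f4aa1'


def _get_git_commit():
    try:
        # In a git checkout, prefer the live HEAD over the baked-in SHA.
        return subprocess.check_output(['git', 'rev-parse', 'HEAD'],
                                       stderr=subprocess.DEVNULL,
                                       text=True).strip()
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Installed from a wheel: fall back to the substituted constant.
        return _SKYPILOT_COMMIT_SHA
```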
sky/data/storage.py CHANGED
@@ -354,7 +354,8 @@ class AbstractStore:
                 metadata.is_sky_managed),
             sync_on_reconstruction=override_args.get('sync_on_reconstruction',
                                                      True),
-            # backward compatibility
+            # Backward compatibility
+            # TODO: remove the hasattr check after v0.11.0
             _bucket_sub_path=override_args.get(
                 '_bucket_sub_path',
                 metadata._bucket_sub_path  # pylint: disable=protected-access
@@ -1462,6 +1463,8 @@ class S3Store(AbstractStore):
                 set to True, the directory is created in the bucket root and
                 contents are uploaded to it.
         """
+        sub_path = (f'/{self._bucket_sub_path}'
+                    if self._bucket_sub_path else '')
 
         def get_file_sync_command(base_dir_path, file_names):
             includes = ' '.join([
@@ -1469,8 +1472,6 @@ class S3Store(AbstractStore):
                 for file_name in file_names
             ])
             base_dir_path = shlex.quote(base_dir_path)
-            sub_path = (f'/{self._bucket_sub_path}'
-                        if self._bucket_sub_path else '')
             sync_command = ('aws s3 sync --no-follow-symlinks --exclude="*" '
                             f'{includes} {base_dir_path} '
                             f's3://{self.name}{sub_path}')
@@ -1485,8 +1486,6 @@ class S3Store(AbstractStore):
                 for file_name in excluded_list
             ])
             src_dir_path = shlex.quote(src_dir_path)
-            sub_path = (f'/{self._bucket_sub_path}'
-                        if self._bucket_sub_path else '')
             sync_command = (f'aws s3 sync --no-follow-symlinks {excludes} '
                             f'{src_dir_path} '
                             f's3://{self.name}{sub_path}/{dest_dir_name}')
@@ -1500,7 +1499,7 @@ class S3Store(AbstractStore):
 
         log_path = sky_logging.generate_tmp_logging_file_path(
             _STORAGE_LOG_FILE_NAME)
-        sync_path = f'{source_message} -> s3://{self.name}/'
+        sync_path = f'{source_message} -> s3://{self.name}{sub_path}/'
         with rich_utils.safe_status(
                 ux_utils.spinner_message(f'Syncing {sync_path}',
                                          log_path=log_path)):
@@ -1959,11 +1958,13 @@ class GcsStore(AbstractStore):
         copy_list = '\n'.join(
            os.path.abspath(os.path.expanduser(p)) for p in source_path_list)
         gsutil_alias, alias_gen = data_utils.get_gsutil_command()
+        sub_path = (f'/{self._bucket_sub_path}'
+                    if self._bucket_sub_path else '')
         sync_command = (f'{alias_gen}; echo "{copy_list}" | {gsutil_alias} '
-                        f'cp -e -n -r -I gs://{self.name}')
+                        f'cp -e -n -r -I gs://{self.name}{sub_path}')
         log_path = sky_logging.generate_tmp_logging_file_path(
             _STORAGE_LOG_FILE_NAME)
-        sync_path = f'{source_message} -> gs://{self.name}/'
+        sync_path = f'{source_message} -> gs://{self.name}{sub_path}/'
         with rich_utils.safe_status(
                 ux_utils.spinner_message(f'Syncing {sync_path}',
                                          log_path=log_path)):
@@ -1995,13 +1996,13 @@ class GcsStore(AbstractStore):
                 set to True, the directory is created in the bucket root and
                 contents are uploaded to it.
         """
+        sub_path = (f'/{self._bucket_sub_path}'
+                    if self._bucket_sub_path else '')
 
         def get_file_sync_command(base_dir_path, file_names):
             sync_format = '|'.join(file_names)
             gsutil_alias, alias_gen = data_utils.get_gsutil_command()
             base_dir_path = shlex.quote(base_dir_path)
-            sub_path = (f'/{self._bucket_sub_path}'
-                        if self._bucket_sub_path else '')
             sync_command = (f'{alias_gen}; {gsutil_alias} '
                             f'rsync -e -x \'^(?!{sync_format}$).*\' '
                             f'{base_dir_path} gs://{self.name}{sub_path}')
@@ -2014,8 +2015,6 @@ class GcsStore(AbstractStore):
             excludes = '|'.join(excluded_list)
             gsutil_alias, alias_gen = data_utils.get_gsutil_command()
             src_dir_path = shlex.quote(src_dir_path)
-            sub_path = (f'/{self._bucket_sub_path}'
-                        if self._bucket_sub_path else '')
             sync_command = (f'{alias_gen}; {gsutil_alias} '
                             f'rsync -e -r -x \'({excludes})\' {src_dir_path} '
                             f'gs://{self.name}{sub_path}/{dest_dir_name}')
@@ -2029,7 +2028,7 @@ class GcsStore(AbstractStore):
 
         log_path = sky_logging.generate_tmp_logging_file_path(
             _STORAGE_LOG_FILE_NAME)
-        sync_path = f'{source_message} -> gs://{self.name}/'
+        sync_path = f'{source_message} -> gs://{self.name}{sub_path}/'
         with rich_utils.safe_status(
                 ux_utils.spinner_message(f'Syncing {sync_path}',
                                          log_path=log_path)):
@@ -2307,15 +2306,24 @@ class AzureBlobStore(AbstractStore):
             An instance of AzureBlobStore.
         """
         assert isinstance(metadata, AzureBlobStore.AzureBlobStoreMetadata)
-        return cls(name=override_args.get('name', metadata.name),
-                   storage_account_name=override_args.get(
-                       'storage_account', metadata.storage_account_name),
-                   source=override_args.get('source', metadata.source),
-                   region=override_args.get('region', metadata.region),
-                   is_sky_managed=override_args.get('is_sky_managed',
-                                                    metadata.is_sky_managed),
-                   sync_on_reconstruction=override_args.get(
-                       'sync_on_reconstruction', True))
+        # TODO: this needs to be kept in sync with the abstract
+        # AbstractStore.from_metadata.
+        return cls(
+            name=override_args.get('name', metadata.name),
+            storage_account_name=override_args.get(
+                'storage_account', metadata.storage_account_name),
+            source=override_args.get('source', metadata.source),
+            region=override_args.get('region', metadata.region),
+            is_sky_managed=override_args.get('is_sky_managed',
+                                             metadata.is_sky_managed),
+            sync_on_reconstruction=override_args.get('sync_on_reconstruction',
+                                                     True),
+            # Backward compatibility
+            # TODO: remove the hasattr check after v0.11.0
+            _bucket_sub_path=override_args.get(
+                '_bucket_sub_path',
+                metadata._bucket_sub_path  # pylint: disable=protected-access
+            ) if hasattr(metadata, '_bucket_sub_path') else None)
 
     def get_metadata(self) -> AzureBlobStoreMetadata:
         return self.AzureBlobStoreMetadata(
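The `hasattr` guard added in both `from_metadata` implementations exists because store metadata is persisted across SkyPilot releases: metadata objects written before `_bucket_sub_path` was introduced lack the attribute entirely. A self-contained sketch of the guard, using hypothetical stand-in classes (not types from the source):

```python
class OldMetadata:
    """Simulates metadata persisted by a release predating _bucket_sub_path."""
    name = 'my-bucket'


class NewMetadata:
    name = 'my-bucket'
    _bucket_sub_path = 'team-a/datasets'


def resolve_bucket_sub_path(metadata, override_args: dict):
    # Mirrors the guarded kwarg in from_metadata: only read the field
    # if the (possibly old) metadata object actually carries it.
    if hasattr(metadata, '_bucket_sub_path'):
        return override_args.get('_bucket_sub_path',
                                 metadata._bucket_sub_path)
    return None


assert resolve_bucket_sub_path(OldMetadata(), {}) is None
assert resolve_bucket_sub_path(NewMetadata(), {}) == 'team-a/datasets'
assert resolve_bucket_sub_path(NewMetadata(),
                               {'_bucket_sub_path': 'x'}) == 'x'
```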
@@ -2795,6 +2803,8 @@ class AzureBlobStore(AbstractStore):
                 set to True, the directory is created in the bucket root and
                 contents are uploaded to it.
         """
+        container_path = (f'{self.container_name}/{self._bucket_sub_path}'
+                          if self._bucket_sub_path else self.container_name)
 
         def get_file_sync_command(base_dir_path, file_names) -> str:
             # shlex.quote is not used for file_names as 'az storage blob sync'
@@ -2803,8 +2813,6 @@ class AzureBlobStore(AbstractStore):
             includes_list = ';'.join(file_names)
             includes = f'--include-pattern "{includes_list}"'
             base_dir_path = shlex.quote(base_dir_path)
-            container_path = (f'{self.container_name}/{self._bucket_sub_path}'
-                              if self._bucket_sub_path else self.container_name)
             sync_command = (f'az storage blob sync '
                             f'--account-name {self.storage_account_name} '
                             f'--account-key {self.storage_account_key} '
@@ -2822,18 +2830,17 @@ class AzureBlobStore(AbstractStore):
                 [file_name.rstrip('*') for file_name in excluded_list])
             excludes = f'--exclude-path "{excludes_list}"'
             src_dir_path = shlex.quote(src_dir_path)
-            container_path = (f'{self.container_name}/{self._bucket_sub_path}'
-                              if self._bucket_sub_path else
-                              f'{self.container_name}')
             if dest_dir_name:
-                container_path = f'{container_path}/{dest_dir_name}'
+                dest_dir_name = f'/{dest_dir_name}'
+            else:
+                dest_dir_name = ''
             sync_command = (f'az storage blob sync '
                             f'--account-name {self.storage_account_name} '
                             f'--account-key {self.storage_account_key} '
                             f'{excludes} '
                             '--delete-destination false '
                             f'--source {src_dir_path} '
-                            f'--container {container_path}')
+                            f'--container {container_path}{dest_dir_name}')
             return sync_command
 
         # Generate message for upload
@@ -2844,7 +2851,7 @@ class AzureBlobStore(AbstractStore):
             source_message = source_path_list[0]
         container_endpoint = data_utils.AZURE_CONTAINER_URL.format(
             storage_account_name=self.storage_account_name,
-            container_name=self.name)
+            container_name=container_path)
         log_path = sky_logging.generate_tmp_logging_file_path(
             _STORAGE_LOG_FILE_NAME)
         sync_path = f'{source_message} -> {container_endpoint}/'
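In the Azure hunks, `container_path` is hoisted out of the two command builders so the same value also feeds `container_endpoint` in the upload message, and the destination directory is now appended per call rather than baked into `container_path`. A minimal sketch of that composition (the helper names below are illustrative, not from the source):

```python
def make_container_path(container_name: str, bucket_sub_path: str = '') -> str:
    # Mirrors the hoisted assignment: the optional sub path rides on the
    # container path used by every sync command and the endpoint message.
    return (f'{container_name}/{bucket_sub_path}'
            if bucket_sub_path else container_name)


def make_dest(container_path: str, dest_dir_name: str = '') -> str:
    # Mirrors the new per-call handling in get_dir_sync_command.
    suffix = f'/{dest_dir_name}' if dest_dir_name else ''
    return f'{container_path}{suffix}'


assert make_dest(make_container_path('data', 'run1'), 'ckpt') == 'data/run1/ckpt'
assert make_dest(make_container_path('data')) == 'data'
```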
@@ -3238,6 +3245,8 @@ class R2Store(AbstractStore):
                 set to True, the directory is created in the bucket root and
                 contents are uploaded to it.
         """
+        sub_path = (f'/{self._bucket_sub_path}'
+                    if self._bucket_sub_path else '')
 
         def get_file_sync_command(base_dir_path, file_names):
             includes = ' '.join([
@@ -3246,8 +3255,6 @@ class R2Store(AbstractStore):
             ])
             endpoint_url = cloudflare.create_endpoint()
             base_dir_path = shlex.quote(base_dir_path)
-            sub_path = (f'/{self._bucket_sub_path}'
-                        if self._bucket_sub_path else '')
             sync_command = ('AWS_SHARED_CREDENTIALS_FILE='
                             f'{cloudflare.R2_CREDENTIALS_PATH} '
                             'aws s3 sync --no-follow-symlinks --exclude="*" '
@@ -3267,8 +3274,6 @@ class R2Store(AbstractStore):
             ])
             endpoint_url = cloudflare.create_endpoint()
             src_dir_path = shlex.quote(src_dir_path)
-            sub_path = (f'/{self._bucket_sub_path}'
-                        if self._bucket_sub_path else '')
             sync_command = ('AWS_SHARED_CREDENTIALS_FILE='
                             f'{cloudflare.R2_CREDENTIALS_PATH} '
                             f'aws s3 sync --no-follow-symlinks {excludes} '
@@ -3286,7 +3291,7 @@ class R2Store(AbstractStore):
 
         log_path = sky_logging.generate_tmp_logging_file_path(
             _STORAGE_LOG_FILE_NAME)
-        sync_path = f'{source_message} -> r2://{self.name}/'
+        sync_path = f'{source_message} -> r2://{self.name}{sub_path}/'
         with rich_utils.safe_status(
                 ux_utils.spinner_message(f'Syncing {sync_path}',
                                          log_path=log_path)):
@@ -3710,6 +3715,8 @@ class IBMCosStore(AbstractStore):
                 set to True, the directory is created in the bucket root and
                 contents are uploaded to it.
         """
+        sub_path = (f'/{self._bucket_sub_path}'
+                    if self._bucket_sub_path else '')
 
         def get_dir_sync_command(src_dir_path, dest_dir_name) -> str:
             """returns an rclone command that copies a complete folder
@@ -3731,8 +3738,6 @@ class IBMCosStore(AbstractStore):
             # .git directory is excluded from the sync
             # wrapping src_dir_path with "" to support path with spaces
             src_dir_path = shlex.quote(src_dir_path)
-            sub_path = (f'/{self._bucket_sub_path}'
-                        if self._bucket_sub_path else '')
             sync_command = (
                 'rclone copy --exclude ".git/*" '
                 f'{src_dir_path} '
@@ -3763,8 +3768,6 @@ class IBMCosStore(AbstractStore):
                 for file_name in file_names
             ])
             base_dir_path = shlex.quote(base_dir_path)
-            sub_path = (f'/{self._bucket_sub_path}'
-                        if self._bucket_sub_path else '')
             sync_command = (
                 'rclone copy '
                 f'{includes} {base_dir_path} '
@@ -3779,7 +3782,8 @@ class IBMCosStore(AbstractStore):
 
         log_path = sky_logging.generate_tmp_logging_file_path(
             _STORAGE_LOG_FILE_NAME)
-        sync_path = f'{source_message} -> cos://{self.region}/{self.name}/'
+        sync_path = (
+            f'{source_message} -> cos://{self.region}/{self.name}{sub_path}/')
         with rich_utils.safe_status(
                 ux_utils.spinner_message(f'Syncing {sync_path}',
                                          log_path=log_path)):
@@ -4178,15 +4182,21 @@ class OciStore(AbstractStore):
                 set to True, the directory is created in the bucket root and
                 contents are uploaded to it.
         """
+        sub_path = (f'{self._bucket_sub_path}/'
+                    if self._bucket_sub_path else '')
 
         @oci.with_oci_env
         def get_file_sync_command(base_dir_path, file_names):
             includes = ' '.join(
                 [f'--include "{file_name}"' for file_name in file_names])
+            prefix_arg = ''
+            if sub_path:
+                prefix_arg = f'--object-prefix "{sub_path.strip("/")}"'
             sync_command = (
                 'oci os object bulk-upload --no-follow-symlinks --overwrite '
                 f'--bucket-name {self.name} --namespace-name {self.namespace} '
                 f'--region {self.region} --src-dir "{base_dir_path}" '
+                f'{prefix_arg} '
                 f'{includes}')
 
             return sync_command
@@ -4207,7 +4217,8 @@ class OciStore(AbstractStore):
             sync_command = (
                 'oci os object bulk-upload --no-follow-symlinks --overwrite '
                 f'--bucket-name {self.name} --namespace-name {self.namespace} '
-                f'--region {self.region} --object-prefix "{dest_dir_name}" '
+                f'--region {self.region} '
+                f'--object-prefix "{sub_path}{dest_dir_name}" '
                 f'--src-dir "{src_dir_path}" {excludes}')
 
             return sync_command
@@ -4220,7 +4231,7 @@ class OciStore(AbstractStore):
 
         log_path = sky_logging.generate_tmp_logging_file_path(
             _STORAGE_LOG_FILE_NAME)
-        sync_path = f'{source_message} -> oci://{self.name}/'
+        sync_path = f'{source_message} -> oci://{self.name}/{sub_path}'
         with rich_utils.safe_status(
                 ux_utils.spinner_message(f'Syncing {sync_path}',
                                          log_path=log_path)):
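Taken together, the S3, GCS, R2, IBM COS, and OCI hunks apply one refactor: `sub_path` moves from inside each `get_*_sync_command` closure to the enclosing upload method, so it is computed once per sync and the `Syncing ...` status line can show the real destination prefix. A simplified sketch of the resulting shape, using a hypothetical store class rather than SkyPilot's:

```python
# Hypothetical store illustrating the hoisting refactor: sub_path is
# computed once per sync call and shared by the closure and the log line.
class DemoStore:

    def __init__(self, name: str, bucket_sub_path: str = ''):
        self.name = name
        self._bucket_sub_path = bucket_sub_path

    def batch_sync(self, src_dir_path: str, dest_dir_name: str = ''):
        # Hoisted out of the closure (previously duplicated inside each
        # get_*_sync_command helper).
        sub_path = (f'/{self._bucket_sub_path}'
                    if self._bucket_sub_path else '')

        def get_dir_sync_command() -> str:
            return (f'aws s3 sync --no-follow-symlinks {src_dir_path} '
                    f's3://{self.name}{sub_path}/{dest_dir_name}')

        # The status message now includes the destination prefix too.
        sync_path = f'{src_dir_path} -> s3://{self.name}{sub_path}/'
        return get_dir_sync_command(), sync_path


cmd, path = DemoStore('my-bucket', 'team-a').batch_sync('/tmp/data', 'run1')
assert 's3://my-bucket/team-a/run1' in cmd
```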
{skypilot_nightly-1.0.0.dev20250213.dist-info → skypilot_nightly-1.0.0.dev20250214.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: skypilot-nightly
-Version: 1.0.0.dev20250213
+Version: 1.0.0.dev20250214
 Summary: SkyPilot: An intercloud broker for the clouds
 Author: SkyPilot Team
 License: Apache 2.0
@@ -184,7 +184,9 @@ Dynamic: summary
 
 ----
 :fire: *News* :fire:
-- [Jan 2025] Launch and Serve **[DeepSeek-R1](https://github.com/deepseek-ai/DeepSeek-R1)** and **[Janus](https://github.com/deepseek-ai/DeepSeek-Janus)** on Kubernetes or Any Cloud: [**R1 example**](./llm/deepseek-r1/) and [**Janus example**](./llm/deepseek-janus/)
+- [Feb 2025] Run and Serve DeepSeek-R1 671B using SkyPilot and SGLang with high throughput: [**example**](./llm/deepseek-r1/)
+- [Jan 2025] Prepare and Serve Large-Scale Image Search with **Vector Database**: [**blog post**](https://blog.skypilot.co/large-scale-vector-database/) [**example**](./examples/vector_database/)
+- [Jan 2025] Launch and Serve distilled models from **[DeepSeek-R1](https://github.com/deepseek-ai/DeepSeek-R1)** and **[Janus](https://github.com/deepseek-ai/DeepSeek-Janus)** on Kubernetes or Any Cloud: [**R1 example**](./llm/deepseek-r1-distilled/) and [**Janus example**](./llm/deepseek-janus/)
 - [Oct 2024] :tada: **SkyPilot crossed 1M+ downloads** :tada:: Thank you to our community! [**Twitter/X**](https://x.com/skypilot_org/status/1844770841718067638)
 - [Sep 2024] Point, Launch and Serve **Llama 3.2** on Kubernetes or Any Cloud: [**example**](./llm/llama-3_2/)
 - [Sep 2024] Run and deploy [**Pixtral**](./llm/pixtral), the first open-source multimodal model from Mistral AI.
@@ -342,7 +344,7 @@ Runnable examples:
 - [LocalGPT](./llm/localgpt)
 - [Falcon](./llm/falcon)
 - Add yours here & see more in [`llm/`](./llm)!
-- Framework examples: [PyTorch DDP](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_torch.yaml), [DeepSpeed](./examples/deepspeed-multinode/sky.yaml), [JAX/Flax on TPU](https://github.com/skypilot-org/skypilot/blob/master/examples/tpu/tpuvm_mnist.yaml), [Stable Diffusion](https://github.com/skypilot-org/skypilot/tree/master/examples/stable_diffusion), [Detectron2](https://github.com/skypilot-org/skypilot/blob/master/examples/detectron2_docker.yaml), [Distributed](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_tf_app.py) [TensorFlow](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_app_storage.yaml), [Ray Train](examples/distributed_ray_train/ray_train.yaml), [NeMo](https://github.com/skypilot-org/skypilot/blob/master/examples/nemo/), [programmatic grid search](https://github.com/skypilot-org/skypilot/blob/master/examples/huggingface_glue_imdb_grid_search_app.py), [Docker](https://github.com/skypilot-org/skypilot/blob/master/examples/docker/echo_app.yaml), [Cog](https://github.com/skypilot-org/skypilot/blob/master/examples/cog/), [Unsloth](https://github.com/skypilot-org/skypilot/blob/master/examples/unsloth/unsloth.yaml), [Ollama](https://github.com/skypilot-org/skypilot/blob/master/llm/ollama), [llm.c](https://github.com/skypilot-org/skypilot/tree/master/llm/gpt-2), [Airflow](./examples/airflow/training_workflow) and [many more (`examples/`)](./examples).
+- Framework examples: [Vector Database](./examples/vector_database/), [PyTorch DDP](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_torch.yaml), [DeepSpeed](./examples/deepspeed-multinode/sky.yaml), [JAX/Flax on TPU](https://github.com/skypilot-org/skypilot/blob/master/examples/tpu/tpuvm_mnist.yaml), [Stable Diffusion](https://github.com/skypilot-org/skypilot/tree/master/examples/stable_diffusion), [Detectron2](https://github.com/skypilot-org/skypilot/blob/master/examples/detectron2_docker.yaml), [Distributed](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_tf_app.py) [TensorFlow](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_app_storage.yaml), [Ray Train](examples/distributed_ray_train/ray_train.yaml), [NeMo](https://github.com/skypilot-org/skypilot/blob/master/examples/nemo/), [programmatic grid search](https://github.com/skypilot-org/skypilot/blob/master/examples/huggingface_glue_imdb_grid_search_app.py), [Docker](https://github.com/skypilot-org/skypilot/blob/master/examples/docker/echo_app.yaml), [Cog](https://github.com/skypilot-org/skypilot/blob/master/examples/cog/), [Unsloth](https://github.com/skypilot-org/skypilot/blob/master/examples/unsloth/unsloth.yaml), [Ollama](https://github.com/skypilot-org/skypilot/blob/master/llm/ollama), [llm.c](https://github.com/skypilot-org/skypilot/tree/master/llm/gpt-2), [Airflow](./examples/airflow/training_workflow) and [many more (`examples/`)](./examples).
 
 Case Studies and Integrations: [Community Spotlights](https://blog.skypilot.co/community/)
 
{skypilot_nightly-1.0.0.dev20250213.dist-info → skypilot_nightly-1.0.0.dev20250214.dist-info}/RECORD RENAMED
@@ -1,4 +1,4 @@
-sky/__init__.py,sha256=ulWt-DtbuSpxFnOQtMOqMTPqAYFd6WgWd_T-bxS7_QM,5560
+sky/__init__.py,sha256=hnw0sI17qKlRS50e2Ux9neBg1F8HcuZMLdlV7S9ziIE,5560
 sky/admin_policy.py,sha256=hPo02f_A32gCqhUueF0QYy1fMSSKqRwYEg_9FxScN_s,3248
 sky/authentication.py,sha256=MNc9uHnvQ1EsEl8SsrYcYCGbxcnDbR6gaRCXVNd5RZE,22338
 sky/check.py,sha256=xzLlxUkBCrzpOho8lw65EvKLPl_b9lA2nteF5MSYbDQ,10885
@@ -98,7 +98,7 @@ sky/data/__init__.py,sha256=Nhaf1NURisXpZuwWANa2IuCyppIuc720FRwqSE2oEwY,184
 sky/data/data_transfer.py,sha256=wixC4_3_JaeJFdGKOp-O5ulcsMugDSgrCR0SnPpugGc,8946
 sky/data/data_utils.py,sha256=HjcgMDuWRR_fNQ9gjuROi9GgPVvTGApiJwxGtdb2_UU,28860
 sky/data/mounting_utils.py,sha256=tJHBPEDP1Wg_r3oSGBwFhMDLnPCMPSFRz26O0QkDd0Y,14908
-sky/data/storage.py,sha256=CWVKnHhdzXw1biPbRqYizkyVexL_OCELuJCqtd4hit4,204094
+sky/data/storage.py,sha256=vlipyOOxw43W6t5O9ccL53OYWAxPVMjF16qlfbZhdQo,204431
 sky/data/storage_utils.py,sha256=cM3kxlffYE7PnJySDu8huyUsMX_JYsf9uer8r5OYsjo,9556
 sky/jobs/__init__.py,sha256=ObZcz3lL1ip8JcmR6gbfZ4RMMfXJJdsnuU2zLQUb8jY,1546
 sky/jobs/constants.py,sha256=9kIdpwWNI9zWKQO39LTg9spUMGl5Iqx4ByIjRlic7Hw,1893
@@ -298,9 +298,9 @@ sky/utils/kubernetes/k8s_gpu_labeler_job.yaml,sha256=k0TBoQ4zgf79-sVkixKSGYFHQ7Z
 sky/utils/kubernetes/k8s_gpu_labeler_setup.yaml,sha256=VLKT2KKimZu1GDg_4AIlIt488oMQvhRZWwsj9vBbPUg,3812
 sky/utils/kubernetes/rsync_helper.sh,sha256=h4YwrPFf9727CACnMJvF3EyK_0OeOYKKt4su_daKekw,1256
 sky/utils/kubernetes/ssh_jump_lifecycle_manager.py,sha256=Kq1MDygF2IxFmu9FXpCxqucXLmeUrvs6OtRij6XTQbo,6554
-skypilot_nightly-1.0.0.dev20250213.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
-skypilot_nightly-1.0.0.dev20250213.dist-info/METADATA,sha256=hLWjPTEc6CnhdlKg2uvdj1VZgjmf3XNt0fJV3R61GTc,21397
-skypilot_nightly-1.0.0.dev20250213.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-skypilot_nightly-1.0.0.dev20250213.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
-skypilot_nightly-1.0.0.dev20250213.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
-skypilot_nightly-1.0.0.dev20250213.dist-info/RECORD,,
+skypilot_nightly-1.0.0.dev20250214.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
+skypilot_nightly-1.0.0.dev20250214.dist-info/METADATA,sha256=jxTYT5S0cFhCCQ6C77MoNQIW2yEJCxgX1Nz4LfPoUgI,21800
+skypilot_nightly-1.0.0.dev20250214.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+skypilot_nightly-1.0.0.dev20250214.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
+skypilot_nightly-1.0.0.dev20250214.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
+skypilot_nightly-1.0.0.dev20250214.dist-info/RECORD,,