skypilot-nightly 1.0.0.dev20250213__py3-none-any.whl → 1.0.0.dev20250215__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
sky/__init__.py CHANGED
@@ -5,7 +5,7 @@ from typing import Optional
  import urllib.request
 
  # Replaced with the current commit when building the wheels.
- _SKYPILOT_COMMIT_SHA = 'c49961417a83b049b3f3435a252c8ec5ea0fb5e6'
+ _SKYPILOT_COMMIT_SHA = '354bbdf3a1d031b350011bc76570cf8c009ecc4a'
 
 
  def _get_git_commit():
@@ -35,7 +35,7 @@ def _get_git_commit():
 
 
  __commit__ = _get_git_commit()
- __version__ = '1.0.0.dev20250213'
+ __version__ = '1.0.0.dev20250215'
  __root_dir__ = os.path.dirname(os.path.abspath(__file__))
 
 
sky/data/storage.py CHANGED
@@ -354,7 +354,8 @@ class AbstractStore:
  metadata.is_sky_managed),
  sync_on_reconstruction=override_args.get('sync_on_reconstruction',
  True),
- # backward compatibility
+ # Backward compatibility
+ # TODO: remove the hasattr check after v0.11.0
  _bucket_sub_path=override_args.get(
  '_bucket_sub_path',
  metadata._bucket_sub_path # pylint: disable=protected-access
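
The hunk above adds a `hasattr` guard when a store is rebuilt from persisted metadata. A minimal standalone sketch of why such a guard helps (illustrative only, not SkyPilot code; `OldMetadata` is a hypothetical stand-in for a metadata object serialized by an older release that predates `_bucket_sub_path`):

```python
import pickle


class OldMetadata:
    """Stand-in for a metadata class persisted by a pre-upgrade release."""

    def __init__(self, name: str):
        self.name = name


# Simulate loading metadata written before _bucket_sub_path existed.
meta = pickle.loads(pickle.dumps(OldMetadata('my-bucket')))

# The guarded access falls back to None instead of raising AttributeError.
sub_path = (meta._bucket_sub_path
            if hasattr(meta, '_bucket_sub_path') else None)
print(sub_path)  # -> None
```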
@@ -1462,6 +1463,8 @@ class S3Store(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')
 
  def get_file_sync_command(base_dir_path, file_names):
  includes = ' '.join([
@@ -1469,8 +1472,6 @@ class S3Store(AbstractStore):
  for file_name in file_names
  ])
  base_dir_path = shlex.quote(base_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = ('aws s3 sync --no-follow-symlinks --exclude="*" '
  f'{includes} {base_dir_path} '
  f's3://{self.name}{sub_path}')
@@ -1485,8 +1486,6 @@ class S3Store(AbstractStore):
  for file_name in excluded_list
  ])
  src_dir_path = shlex.quote(src_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (f'aws s3 sync --no-follow-symlinks {excludes} '
  f'{src_dir_path} '
  f's3://{self.name}{sub_path}/{dest_dir_name}')
@@ -1500,7 +1499,7 @@ class S3Store(AbstractStore):
 
  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> s3://{self.name}/'
+ sync_path = f'{source_message} -> s3://{self.name}{sub_path}/'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -1959,11 +1958,13 @@ class GcsStore(AbstractStore):
  copy_list = '\n'.join(
  os.path.abspath(os.path.expanduser(p)) for p in source_path_list)
  gsutil_alias, alias_gen = data_utils.get_gsutil_command()
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')
  sync_command = (f'{alias_gen}; echo "{copy_list}" | {gsutil_alias} '
- f'cp -e -n -r -I gs://{self.name}')
+ f'cp -e -n -r -I gs://{self.name}{sub_path}')
  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> gs://{self.name}/'
+ sync_path = f'{source_message} -> gs://{self.name}{sub_path}/'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -1995,13 +1996,13 @@ class GcsStore(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')
 
  def get_file_sync_command(base_dir_path, file_names):
  sync_format = '|'.join(file_names)
  gsutil_alias, alias_gen = data_utils.get_gsutil_command()
  base_dir_path = shlex.quote(base_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (f'{alias_gen}; {gsutil_alias} '
  f'rsync -e -x \'^(?!{sync_format}$).*\' '
  f'{base_dir_path} gs://{self.name}{sub_path}')
@@ -2014,8 +2015,6 @@ class GcsStore(AbstractStore):
  excludes = '|'.join(excluded_list)
  gsutil_alias, alias_gen = data_utils.get_gsutil_command()
  src_dir_path = shlex.quote(src_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (f'{alias_gen}; {gsutil_alias} '
  f'rsync -e -r -x \'({excludes})\' {src_dir_path} '
  f'gs://{self.name}{sub_path}/{dest_dir_name}')
@@ -2029,7 +2028,7 @@ class GcsStore(AbstractStore):
 
  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> gs://{self.name}/'
+ sync_path = f'{source_message} -> gs://{self.name}{sub_path}/'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -2307,15 +2306,24 @@ class AzureBlobStore(AbstractStore):
  An instance of AzureBlobStore.
  """
  assert isinstance(metadata, AzureBlobStore.AzureBlobStoreMetadata)
- return cls(name=override_args.get('name', metadata.name),
- storage_account_name=override_args.get(
- 'storage_account', metadata.storage_account_name),
- source=override_args.get('source', metadata.source),
- region=override_args.get('region', metadata.region),
- is_sky_managed=override_args.get('is_sky_managed',
- metadata.is_sky_managed),
- sync_on_reconstruction=override_args.get(
- 'sync_on_reconstruction', True))
+ # TODO: this needs to be kept in sync with the abstract
+ # AbstractStore.from_metadata.
+ return cls(
+ name=override_args.get('name', metadata.name),
+ storage_account_name=override_args.get(
+ 'storage_account', metadata.storage_account_name),
+ source=override_args.get('source', metadata.source),
+ region=override_args.get('region', metadata.region),
+ is_sky_managed=override_args.get('is_sky_managed',
+ metadata.is_sky_managed),
+ sync_on_reconstruction=override_args.get('sync_on_reconstruction',
+ True),
+ # Backward compatibility
+ # TODO: remove the hasattr check after v0.11.0
+ _bucket_sub_path=override_args.get(
+ '_bucket_sub_path',
+ metadata._bucket_sub_path # pylint: disable=protected-access
+ ) if hasattr(metadata, '_bucket_sub_path') else None)
 
  def get_metadata(self) -> AzureBlobStoreMetadata:
  return self.AzureBlobStoreMetadata(
@@ -2795,6 +2803,8 @@ class AzureBlobStore(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ container_path = (f'{self.container_name}/{self._bucket_sub_path}'
+ if self._bucket_sub_path else self.container_name)
 
  def get_file_sync_command(base_dir_path, file_names) -> str:
  # shlex.quote is not used for file_names as 'az storage blob sync'
@@ -2803,8 +2813,6 @@ class AzureBlobStore(AbstractStore):
  includes_list = ';'.join(file_names)
  includes = f'--include-pattern "{includes_list}"'
  base_dir_path = shlex.quote(base_dir_path)
- container_path = (f'{self.container_name}/{self._bucket_sub_path}'
- if self._bucket_sub_path else self.container_name)
  sync_command = (f'az storage blob sync '
  f'--account-name {self.storage_account_name} '
  f'--account-key {self.storage_account_key} '
@@ -2822,18 +2830,17 @@ class AzureBlobStore(AbstractStore):
  [file_name.rstrip('*') for file_name in excluded_list])
  excludes = f'--exclude-path "{excludes_list}"'
  src_dir_path = shlex.quote(src_dir_path)
- container_path = (f'{self.container_name}/{self._bucket_sub_path}'
- if self._bucket_sub_path else
- f'{self.container_name}')
  if dest_dir_name:
- container_path = f'{container_path}/{dest_dir_name}'
+ dest_dir_name = f'/{dest_dir_name}'
+ else:
+ dest_dir_name = ''
  sync_command = (f'az storage blob sync '
  f'--account-name {self.storage_account_name} '
  f'--account-key {self.storage_account_key} '
  f'{excludes} '
  '--delete-destination false '
  f'--source {src_dir_path} '
- f'--container {container_path}')
+ f'--container {container_path}{dest_dir_name}')
  return sync_command
 
  # Generate message for upload
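
A quick sketch of how the Azure destination argument is composed after this change (illustrative only; the helper below is hypothetical, not SkyPilot code): `container_path` is now built once from the optional bucket sub-path, and the per-call `dest_dir_name` is appended as a suffix.

```python
def make_container_arg(container_name: str,
                       bucket_sub_path: str = '',
                       dest_dir_name: str = '') -> str:
    # Computed once per upload, outside the per-directory helper.
    container_path = (f'{container_name}/{bucket_sub_path}'
                      if bucket_sub_path else container_name)
    # Appended per call, mirroring the dest_dir_name handling in the hunk above.
    suffix = f'/{dest_dir_name}' if dest_dir_name else ''
    return f'--container {container_path}{suffix}'


print(make_container_arg('data'))                        # --container data
print(make_container_arg('data', 'team/alice', 'ckpt'))  # --container data/team/alice/ckpt
```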
@@ -2844,7 +2851,7 @@ class AzureBlobStore(AbstractStore):
  source_message = source_path_list[0]
  container_endpoint = data_utils.AZURE_CONTAINER_URL.format(
  storage_account_name=self.storage_account_name,
- container_name=self.name)
+ container_name=container_path)
  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
  sync_path = f'{source_message} -> {container_endpoint}/'
@@ -3238,6 +3245,8 @@ class R2Store(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')
 
  def get_file_sync_command(base_dir_path, file_names):
  includes = ' '.join([
@@ -3246,8 +3255,6 @@ class R2Store(AbstractStore):
  ])
  endpoint_url = cloudflare.create_endpoint()
  base_dir_path = shlex.quote(base_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = ('AWS_SHARED_CREDENTIALS_FILE='
  f'{cloudflare.R2_CREDENTIALS_PATH} '
  'aws s3 sync --no-follow-symlinks --exclude="*" '
@@ -3267,8 +3274,6 @@ class R2Store(AbstractStore):
  ])
  endpoint_url = cloudflare.create_endpoint()
  src_dir_path = shlex.quote(src_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = ('AWS_SHARED_CREDENTIALS_FILE='
  f'{cloudflare.R2_CREDENTIALS_PATH} '
  f'aws s3 sync --no-follow-symlinks {excludes} '
@@ -3286,7 +3291,7 @@ class R2Store(AbstractStore):
 
  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> r2://{self.name}/'
+ sync_path = f'{source_message} -> r2://{self.name}{sub_path}/'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -3710,6 +3715,8 @@ class IBMCosStore(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'/{self._bucket_sub_path}'
+ if self._bucket_sub_path else '')
 
  def get_dir_sync_command(src_dir_path, dest_dir_name) -> str:
  """returns an rclone command that copies a complete folder
@@ -3731,8 +3738,6 @@ class IBMCosStore(AbstractStore):
  # .git directory is excluded from the sync
  # wrapping src_dir_path with "" to support path with spaces
  src_dir_path = shlex.quote(src_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (
  'rclone copy --exclude ".git/*" '
  f'{src_dir_path} '
@@ -3763,8 +3768,6 @@ class IBMCosStore(AbstractStore):
  for file_name in file_names
  ])
  base_dir_path = shlex.quote(base_dir_path)
- sub_path = (f'/{self._bucket_sub_path}'
- if self._bucket_sub_path else '')
  sync_command = (
  'rclone copy '
  f'{includes} {base_dir_path} '
@@ -3779,7 +3782,8 @@ class IBMCosStore(AbstractStore):
 
  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> cos://{self.region}/{self.name}/'
+ sync_path = (
+ f'{source_message} -> cos://{self.region}/{self.name}{sub_path}/')
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
@@ -4178,15 +4182,21 @@ class OciStore(AbstractStore):
  set to True, the directory is created in the bucket root and
  contents are uploaded to it.
  """
+ sub_path = (f'{self._bucket_sub_path}/'
+ if self._bucket_sub_path else '')
 
  @oci.with_oci_env
  def get_file_sync_command(base_dir_path, file_names):
  includes = ' '.join(
  [f'--include "{file_name}"' for file_name in file_names])
+ prefix_arg = ''
+ if sub_path:
+ prefix_arg = f'--object-prefix "{sub_path.strip("/")}"'
  sync_command = (
  'oci os object bulk-upload --no-follow-symlinks --overwrite '
  f'--bucket-name {self.name} --namespace-name {self.namespace} '
  f'--region {self.region} --src-dir "{base_dir_path}" '
+ f'{prefix_arg} '
  f'{includes}')
 
  return sync_command
@@ -4207,7 +4217,8 @@ class OciStore(AbstractStore):
  sync_command = (
  'oci os object bulk-upload --no-follow-symlinks --overwrite '
  f'--bucket-name {self.name} --namespace-name {self.namespace} '
- f'--region {self.region} --object-prefix "{dest_dir_name}" '
+ f'--region {self.region} '
+ f'--object-prefix "{sub_path}{dest_dir_name}" '
  f'--src-dir "{src_dir_path}" {excludes}')
 
  return sync_command
@@ -4220,7 +4231,7 @@ class OciStore(AbstractStore):
 
  log_path = sky_logging.generate_tmp_logging_file_path(
  _STORAGE_LOG_FILE_NAME)
- sync_path = f'{source_message} -> oci://{self.name}/'
+ sync_path = f'{source_message} -> oci://{self.name}/{sub_path}'
  with rich_utils.safe_status(
  ux_utils.spinner_message(f'Syncing {sync_path}',
  log_path=log_path)):
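
Across the S3, GCS, Azure, R2, IBM COS, and OCI stores, this diff hoists the optional bucket sub-path prefix out of the nested sync helpers so it is computed once per upload and also appears in the "Syncing ..." status message. A minimal standalone sketch of that pattern (illustrative only; the helper name is hypothetical, not SkyPilot code):

```python
def make_sub_path(bucket_sub_path: str = '') -> str:
    # '' when no sub-path is configured, '/<prefix>' otherwise -- the same
    # f-string pattern each store now evaluates once per upload.
    return f'/{bucket_sub_path}' if bucket_sub_path else ''


bucket = 'my-bucket'
sub_path = make_sub_path('team/alice')

# The prefix is reused both in the sync command and in the status message.
sync_target = f's3://{bucket}{sub_path}/'
print(f'some/local/dir -> {sync_target}')  # some/local/dir -> s3://my-bucket/team/alice/
```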
sky/setup_files/dependencies.py CHANGED
@@ -52,15 +52,12 @@ local_ray = [
 
  remote = [
  # Adopted from ray's setup.py:
- # https://github.com/ray-project/ray/blob/ray-2.4.0/python/setup.py
+ # https://github.com/ray-project/ray/blob/ray-2.9.3/python/setup.py#L251-L252
  # SkyPilot: != 1.48.0 is required to avoid the error where ray dashboard
  # fails to start when ray start is called (#2054).
  # Tracking issue: https://github.com/ray-project/ray/issues/30984
- 'grpcio >= 1.32.0, <= 1.49.1, != 1.48.0; python_version < \'3.10\' and sys_platform == \'darwin\'', # noqa:E501 pylint: disable=line-too-long
- 'grpcio >= 1.42.0, <= 1.49.1, != 1.48.0; python_version >= \'3.10\' and sys_platform == \'darwin\'', # noqa:E501 pylint: disable=line-too-long
- # Original issue: https://github.com/ray-project/ray/issues/33833
- 'grpcio >= 1.32.0, <= 1.51.3, != 1.48.0; python_version < \'3.10\' and sys_platform != \'darwin\'', # noqa:E501 pylint: disable=line-too-long
- 'grpcio >= 1.42.0, <= 1.51.3, != 1.48.0; python_version >= \'3.10\' and sys_platform != \'darwin\'', # noqa:E501 pylint: disable=line-too-long
+ 'grpcio >= 1.32.0, != 1.48.0; python_version < \'3.10\'',
+ 'grpcio >= 1.42.0, != 1.48.0; python_version >= \'3.10\'',
  # Adopted from ray's setup.py:
  # https://github.com/ray-project/ray/blob/ray-2.9.3/python/setup.py#L343
  'protobuf >= 3.15.3, != 3.19.5',
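
The relaxed grpcio pins drop the `<=1.49.1` / `<=1.51.3` upper caps and the per-platform (darwin vs. non-darwin) split, leaving only the Python-version marker and the `!=1.48.0` exclusion. A quick way to see how such a requirement string resolves (a hedged sketch, assuming the third-party `packaging` library is installed; not part of SkyPilot):

```python
from packaging.requirements import Requirement

req = Requirement("grpcio >= 1.32.0, != 1.48.0; python_version < '3.10'")

# On Python 3.11 the marker is False, so this line would be skipped and the
# python_version >= '3.10' requirement from the diff would apply instead.
print(req.marker.evaluate({'python_version': '3.11'}))  # False

print(req.specifier.contains('1.48.0'))  # False: explicitly excluded version
print(req.specifier.contains('1.60.0'))  # True: no upper bound any more
```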
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: skypilot-nightly
- Version: 1.0.0.dev20250213
+ Version: 1.0.0.dev20250215
  Summary: SkyPilot: An intercloud broker for the clouds
  Author: SkyPilot Team
  License: Apache 2.0
@@ -83,10 +83,8 @@ Requires-Dist: ray[default]!=2.6.0,>=2.2.0; extra == "oci"
  Provides-Extra: kubernetes
  Requires-Dist: kubernetes!=32.0.0,>=20.0.0; extra == "kubernetes"
  Provides-Extra: remote
- Requires-Dist: grpcio!=1.48.0,<=1.49.1,>=1.32.0; (python_version < "3.10" and sys_platform == "darwin") and extra == "remote"
- Requires-Dist: grpcio!=1.48.0,<=1.49.1,>=1.42.0; (python_version >= "3.10" and sys_platform == "darwin") and extra == "remote"
- Requires-Dist: grpcio!=1.48.0,<=1.51.3,>=1.32.0; (python_version < "3.10" and sys_platform != "darwin") and extra == "remote"
- Requires-Dist: grpcio!=1.48.0,<=1.51.3,>=1.42.0; (python_version >= "3.10" and sys_platform != "darwin") and extra == "remote"
+ Requires-Dist: grpcio!=1.48.0,>=1.32.0; python_version < "3.10" and extra == "remote"
+ Requires-Dist: grpcio!=1.48.0,>=1.42.0; python_version >= "3.10" and extra == "remote"
  Requires-Dist: protobuf!=3.19.5,>=3.15.3; extra == "remote"
  Requires-Dist: pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3; extra == "remote"
  Provides-Extra: runpod
@@ -136,10 +134,8 @@ Requires-Dist: ray[default]!=2.6.0,>=2.2.0; extra == "all"
  Requires-Dist: oci; extra == "all"
  Requires-Dist: ray[default]!=2.6.0,>=2.2.0; extra == "all"
  Requires-Dist: kubernetes!=32.0.0,>=20.0.0; extra == "all"
- Requires-Dist: grpcio!=1.48.0,<=1.49.1,>=1.32.0; (python_version < "3.10" and sys_platform == "darwin") and extra == "all"
- Requires-Dist: grpcio!=1.48.0,<=1.49.1,>=1.42.0; (python_version >= "3.10" and sys_platform == "darwin") and extra == "all"
- Requires-Dist: grpcio!=1.48.0,<=1.51.3,>=1.32.0; (python_version < "3.10" and sys_platform != "darwin") and extra == "all"
- Requires-Dist: grpcio!=1.48.0,<=1.51.3,>=1.42.0; (python_version >= "3.10" and sys_platform != "darwin") and extra == "all"
+ Requires-Dist: grpcio!=1.48.0,>=1.32.0; python_version < "3.10" and extra == "all"
+ Requires-Dist: grpcio!=1.48.0,>=1.42.0; python_version >= "3.10" and extra == "all"
  Requires-Dist: protobuf!=3.19.5,>=3.15.3; extra == "all"
  Requires-Dist: pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3; extra == "all"
  Requires-Dist: runpod>=1.6.1; extra == "all"
@@ -176,6 +172,10 @@ Dynamic: summary
  <img alt="Join Slack" src="https://img.shields.io/badge/SkyPilot-Join%20Slack-blue?logo=slack">
  </a>
 
+ <a href="https://github.com/skypilot-org/skypilot/releases">
+ <img alt="Downloads" src="https://img.shields.io/pypi/dm/skypilot">
+ </a>
+
  </p>
 
  <h3 align="center">
@@ -184,42 +184,25 @@ Dynamic: summary
 
  ----
  :fire: *News* :fire:
- - [Jan 2025] Launch and Serve **[DeepSeek-R1](https://github.com/deepseek-ai/DeepSeek-R1)** and **[Janus](https://github.com/deepseek-ai/DeepSeek-Janus)** on Kubernetes or Any Cloud: [**R1 example**](./llm/deepseek-r1/) and [**Janus example**](./llm/deepseek-janus/)
+ - [Feb 2025] Run and serve **DeepSeek-R1 671B** using SkyPilot and SGLang with high throughput: [**example**](./llm/deepseek-r1/)
+ - [Feb 2025] Prepare and serve large-scale image search with **vector databases**: [**blog post**](https://blog.skypilot.co/large-scale-vector-database/), [**example**](./examples/vector_database/)
+ - [Jan 2025] Launch and serve distilled models from **[DeepSeek-R1](https://github.com/deepseek-ai/DeepSeek-R1)** and **[Janus](https://github.com/deepseek-ai/DeepSeek-Janus)** on Kubernetes or any cloud: [**R1 example**](./llm/deepseek-r1-distilled/) and [**Janus example**](./llm/deepseek-janus/)
  - [Oct 2024] :tada: **SkyPilot crossed 1M+ downloads** :tada:: Thank you to our community! [**Twitter/X**](https://x.com/skypilot_org/status/1844770841718067638)
- - [Sep 2024] Point, Launch and Serve **Llama 3.2** on Kubernetes or Any Cloud: [**example**](./llm/llama-3_2/)
+ - [Sep 2024] Point, launch and serve **Llama 3.2** on Kubernetes or any cloud: [**example**](./llm/llama-3_2/)
  - [Sep 2024] Run and deploy [**Pixtral**](./llm/pixtral), the first open-source multimodal model from Mistral AI.
  - [Jun 2024] Reproduce **GPT** with [llm.c](https://github.com/karpathy/llm.c/discussions/481) on any cloud: [**guide**](./llm/gpt-2/)
  - [Apr 2024] Serve [**Qwen-110B**](https://qwenlm.github.io/blog/qwen1.5-110b/) on your infra: [**example**](./llm/qwen/)
- - [Apr 2024] Using [**Ollama**](https://github.com/ollama/ollama) to deploy quantized LLMs on CPUs and GPUs: [**example**](./llm/ollama/)
- - [Feb 2024] Deploying and scaling [**Gemma**](https://blog.google/technology/developers/gemma-open-models/) with SkyServe: [**example**](./llm/gemma/)
- - [Feb 2024] Serving [**Code Llama 70B**](https://ai.meta.com/blog/code-llama-large-language-model-coding/) with vLLM and SkyServe: [**example**](./llm/codellama/)
- - [Dec 2023] [**Mixtral 8x7B**](https://mistral.ai/news/mixtral-of-experts/), a high quality sparse mixture-of-experts model, was released by Mistral AI! Deploy via SkyPilot on any cloud: [**example**](./llm/mixtral/)
- - [Nov 2023] Using [**Axolotl**](https://github.com/OpenAccess-AI-Collective/axolotl) to finetune Mistral 7B on the cloud (on-demand and spot): [**example**](./llm/axolotl/)
+ - [Apr 2024] Host [**Ollama**](https://github.com/ollama/ollama) on the cloud to deploy LLMs on CPUs and GPUs: [**example**](./llm/ollama/)
 
- **LLM Finetuning Cookbooks**: Finetuning Llama 2 / Llama 3.1 in your own cloud environment, privately: Llama 2 [**example**](./llm/vicuna-llama-2/) and [**blog**](https://blog.skypilot.co/finetuning-llama2-operational-guide/); Llama 3.1 [**example**](./llm/llama-3_1-finetuning/) and [**blog**](https://blog.skypilot.co/finetune-llama-3_1-on-your-infra/)
 
- <details>
- <summary>Archived</summary>
-
- - [Jul 2024] [**Finetune**](./llm/llama-3_1-finetuning/) and [**serve**](./llm/llama-3_1/) **Llama 3.1** on your infra
- - [Apr 2024] Serve and finetune [**Llama 3**](https://docs.skypilot.co/en/latest/gallery/llms/llama-3.html) on any cloud or Kubernetes: [**example**](./llm/llama-3/)
- - [Mar 2024] Serve and deploy [**Databricks DBRX**](https://www.databricks.com/blog/introducing-dbrx-new-state-art-open-llm) on your infra: [**example**](./llm/dbrx/)
- - [Feb 2024] Speed up your LLM deployments with [**SGLang**](https://github.com/sgl-project/sglang) for 5x throughput on SkyServe: [**example**](./llm/sglang/)
- - [Dec 2023] Using [**LoRAX**](https://github.com/predibase/lorax) to serve 1000s of finetuned LLMs on a single instance in the cloud: [**example**](./llm/lorax/)
- - [Sep 2023] [**Mistral 7B**](https://mistral.ai/news/announcing-mistral-7b/), a high-quality open LLM, was released! Deploy via SkyPilot on any cloud: [**Mistral docs**](https://docs.mistral.ai/self-deployment/skypilot)
- - [Sep 2023] Case study: [**Covariant**](https://covariant.ai/) transformed AI development on the cloud using SkyPilot, delivering models 4x faster cost-effectively: [**read the case study**](https://blog.skypilot.co/covariant/)
- - [Jul 2023] Self-Hosted **Llama-2 Chatbot** on Any Cloud: [**example**](./llm/llama-2/)
- - [Jun 2023] Serving LLM 24x Faster On the Cloud [**with vLLM**](https://vllm.ai/) and SkyPilot: [**example**](./llm/vllm/), [**blog post**](https://blog.skypilot.co/serving-llm-24x-faster-on-the-cloud-with-vllm-and-skypilot/)
- - [Apr 2023] [SkyPilot YAMLs](./llm/vicuna/) for finetuning & serving the [Vicuna LLM](https://lmsys.org/blog/2023-03-30-vicuna/) with a single command!
-
- </details>
+ **LLM Finetuning Cookbooks**: Finetuning Llama 2 / Llama 3.1 in your own cloud environment, privately: Llama 2 [**example**](./llm/vicuna-llama-2/) and [**blog**](https://blog.skypilot.co/finetuning-llama2-operational-guide/); Llama 3.1 [**example**](./llm/llama-3_1-finetuning/) and [**blog**](https://blog.skypilot.co/finetune-llama-3_1-on-your-infra/)
 
  ----
 
  SkyPilot is a framework for running AI and batch workloads on any infra, offering unified execution, high cost savings, and high GPU availability.
 
  SkyPilot **abstracts away infra burdens**:
- - Launch [dev clusters](https://docs.skypilot.co/en/latest/examples/interactive-development.html), [jobs](https://docs.skypilot.co/en/latest/examples/managed-jobs.html), and [serving](https://docs.skypilot.co/en/latest/serving/sky-serve.html) on any infra
+ - Launch [clusters](https://docs.skypilot.co/en/latest/examples/interactive-development.html), [jobs](https://docs.skypilot.co/en/latest/examples/managed-jobs.html), and [serving](https://docs.skypilot.co/en/latest/serving/sky-serve.html) on any infra
  - Easy job management: queue, run, and auto-recover many jobs
 
  SkyPilot **supports multiple clusters, clouds, and hardware** ([the Sky](https://arxiv.org/abs/2205.07147)):
@@ -311,7 +294,7 @@ SkyPilot then performs the heavy-lifting for you, including:
  Refer to [Quickstart](https://docs.skypilot.co/en/latest/getting-started/quickstart.html) to get started with SkyPilot.
 
  ## More Information
- To learn more, see [Concept: Sky Computing](https://docs.skypilot.co/en/latest/sky-computing.html), [SkyPilot docs](https://docs.skypilot.co/en/latest/), and [SkyPilot blog](https://blog.skypilot.co/).
+ To learn more, see [SkyPilot Overview](https://docs.skypilot.co/en/latest/overview.html), [SkyPilot docs](https://docs.skypilot.co/en/latest/), and [SkyPilot blog](https://blog.skypilot.co/).
 
  <!-- Keep this section in sync with index.rst in SkyPilot Docs -->
  Runnable examples:
@@ -342,7 +325,7 @@ Runnable examples:
  - [LocalGPT](./llm/localgpt)
  - [Falcon](./llm/falcon)
  - Add yours here & see more in [`llm/`](./llm)!
- - Framework examples: [PyTorch DDP](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_torch.yaml), [DeepSpeed](./examples/deepspeed-multinode/sky.yaml), [JAX/Flax on TPU](https://github.com/skypilot-org/skypilot/blob/master/examples/tpu/tpuvm_mnist.yaml), [Stable Diffusion](https://github.com/skypilot-org/skypilot/tree/master/examples/stable_diffusion), [Detectron2](https://github.com/skypilot-org/skypilot/blob/master/examples/detectron2_docker.yaml), [Distributed](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_tf_app.py) [TensorFlow](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_app_storage.yaml), [Ray Train](examples/distributed_ray_train/ray_train.yaml), [NeMo](https://github.com/skypilot-org/skypilot/blob/master/examples/nemo/), [programmatic grid search](https://github.com/skypilot-org/skypilot/blob/master/examples/huggingface_glue_imdb_grid_search_app.py), [Docker](https://github.com/skypilot-org/skypilot/blob/master/examples/docker/echo_app.yaml), [Cog](https://github.com/skypilot-org/skypilot/blob/master/examples/cog/), [Unsloth](https://github.com/skypilot-org/skypilot/blob/master/examples/unsloth/unsloth.yaml), [Ollama](https://github.com/skypilot-org/skypilot/blob/master/llm/ollama), [llm.c](https://github.com/skypilot-org/skypilot/tree/master/llm/gpt-2), [Airflow](./examples/airflow/training_workflow) and [many more (`examples/`)](./examples).
+ - Framework examples: [Vector Database](./examples/vector_database/), [PyTorch DDP](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_torch.yaml), [DeepSpeed](./examples/deepspeed-multinode/sky.yaml), [JAX/Flax on TPU](https://github.com/skypilot-org/skypilot/blob/master/examples/tpu/tpuvm_mnist.yaml), [Stable Diffusion](https://github.com/skypilot-org/skypilot/tree/master/examples/stable_diffusion), [Detectron2](https://github.com/skypilot-org/skypilot/blob/master/examples/detectron2_docker.yaml), [Distributed](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_distributed_tf_app.py) [TensorFlow](https://github.com/skypilot-org/skypilot/blob/master/examples/resnet_app_storage.yaml), [Ray Train](examples/distributed_ray_train/ray_train.yaml), [NeMo](https://github.com/skypilot-org/skypilot/blob/master/examples/nemo/), [programmatic grid search](https://github.com/skypilot-org/skypilot/blob/master/examples/huggingface_glue_imdb_grid_search_app.py), [Docker](https://github.com/skypilot-org/skypilot/blob/master/examples/docker/echo_app.yaml), [Cog](https://github.com/skypilot-org/skypilot/blob/master/examples/cog/), [Unsloth](https://github.com/skypilot-org/skypilot/blob/master/examples/unsloth/unsloth.yaml), [Ollama](https://github.com/skypilot-org/skypilot/blob/master/llm/ollama), [llm.c](https://github.com/skypilot-org/skypilot/tree/master/llm/gpt-2), [Airflow](./examples/airflow/training_workflow) and [many more (`examples/`)](./examples).
 
  Case Studies and Integrations: [Community Spotlights](https://blog.skypilot.co/community/)
 
@@ -357,7 +340,7 @@ Read the research:
  - [Sky Computing vision paper](https://sigops.org/s/conferences/hotos/2021/papers/hotos21-s02-stoica.pdf) (HotOS 2021)
  - [Policy for Managed Spot Jobs](https://www.usenix.org/conference/nsdi24/presentation/wu-zhanghao) (NSDI 2024)
 
- SkyPilot was initially started at the [Sky Computing Lab](https://sky.cs.berkeley.edu) at UC Berkeley and has since gained many industry contributors. Read more about the project's origin [here](https://docs.skypilot.co/en/latest/sky-computing.html).
+ SkyPilot was initially started at the [Sky Computing Lab](https://sky.cs.berkeley.edu) at UC Berkeley and has since gained many industry contributors. To read about the project's origin and vision, see [Concept: Sky Computing](https://docs.skypilot.co/en/latest/sky-computing.html).
 
  ## Support and Questions
  We are excited to hear your feedback!
@@ -1,4 +1,4 @@
- sky/__init__.py,sha256=ulWt-DtbuSpxFnOQtMOqMTPqAYFd6WgWd_T-bxS7_QM,5560
+ sky/__init__.py,sha256=A9WvRbIF2APLiWFQuaAyh5FXJCmlNyhluArUv3nkI_Y,5560
  sky/admin_policy.py,sha256=hPo02f_A32gCqhUueF0QYy1fMSSKqRwYEg_9FxScN_s,3248
  sky/authentication.py,sha256=MNc9uHnvQ1EsEl8SsrYcYCGbxcnDbR6gaRCXVNd5RZE,22338
  sky/check.py,sha256=xzLlxUkBCrzpOho8lw65EvKLPl_b9lA2nteF5MSYbDQ,10885
@@ -98,7 +98,7 @@ sky/data/__init__.py,sha256=Nhaf1NURisXpZuwWANa2IuCyppIuc720FRwqSE2oEwY,184
  sky/data/data_transfer.py,sha256=wixC4_3_JaeJFdGKOp-O5ulcsMugDSgrCR0SnPpugGc,8946
  sky/data/data_utils.py,sha256=HjcgMDuWRR_fNQ9gjuROi9GgPVvTGApiJwxGtdb2_UU,28860
  sky/data/mounting_utils.py,sha256=tJHBPEDP1Wg_r3oSGBwFhMDLnPCMPSFRz26O0QkDd0Y,14908
- sky/data/storage.py,sha256=CWVKnHhdzXw1biPbRqYizkyVexL_OCELuJCqtd4hit4,204094
+ sky/data/storage.py,sha256=vlipyOOxw43W6t5O9ccL53OYWAxPVMjF16qlfbZhdQo,204431
  sky/data/storage_utils.py,sha256=cM3kxlffYE7PnJySDu8huyUsMX_JYsf9uer8r5OYsjo,9556
  sky/jobs/__init__.py,sha256=ObZcz3lL1ip8JcmR6gbfZ4RMMfXJJdsnuU2zLQUb8jY,1546
  sky/jobs/constants.py,sha256=9kIdpwWNI9zWKQO39LTg9spUMGl5Iqx4ByIjRlic7Hw,1893
@@ -208,7 +208,7 @@ sky/serve/serve_utils.py,sha256=m1Zcjslnzcr5AAppzV48WDOwMWjRaXotTUd_iN-dHgc,4065
  sky/serve/service.py,sha256=DPU1PJGuHa1WaNqxYqgpmqd4LA9jBbQM-KlLrA6C1M0,12156
  sky/serve/service_spec.py,sha256=Q0qnFRjNnfGIpksubH5VqPKIlvpWs5had_Ma_PSHyo8,16940
  sky/setup_files/MANIFEST.in,sha256=WF0T89NLichHxZDDSQzvSpiONtAEFyur2MPmGczgTIo,555
- sky/setup_files/dependencies.py,sha256=hHa5EU8pE-cNZYoU4gWxnx08ykGkdGZKUIowB3Wo5Q8,6271
+ sky/setup_files/dependencies.py,sha256=iCVL37XzDj4PmYjbGtQsq5Nd4BCNKsn_FIIZ4FXck9I,5746
  sky/setup_files/setup.py,sha256=HMqAIxHrhtQUOlm6_Iz5E_bL4dUvsYgXc9YVQIFayPs,7417
  sky/skylet/LICENSE,sha256=BnFrJSvUFpMUoH5mOpWnEvaC5R6Uux8W6WXgrte8iYg,12381
  sky/skylet/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -298,9 +298,9 @@ sky/utils/kubernetes/k8s_gpu_labeler_job.yaml,sha256=k0TBoQ4zgf79-sVkixKSGYFHQ7Z
  sky/utils/kubernetes/k8s_gpu_labeler_setup.yaml,sha256=VLKT2KKimZu1GDg_4AIlIt488oMQvhRZWwsj9vBbPUg,3812
  sky/utils/kubernetes/rsync_helper.sh,sha256=h4YwrPFf9727CACnMJvF3EyK_0OeOYKKt4su_daKekw,1256
  sky/utils/kubernetes/ssh_jump_lifecycle_manager.py,sha256=Kq1MDygF2IxFmu9FXpCxqucXLmeUrvs6OtRij6XTQbo,6554
- skypilot_nightly-1.0.0.dev20250213.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
- skypilot_nightly-1.0.0.dev20250213.dist-info/METADATA,sha256=hLWjPTEc6CnhdlKg2uvdj1VZgjmf3XNt0fJV3R61GTc,21397
- skypilot_nightly-1.0.0.dev20250213.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- skypilot_nightly-1.0.0.dev20250213.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
- skypilot_nightly-1.0.0.dev20250213.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
- skypilot_nightly-1.0.0.dev20250213.dist-info/RECORD,,
+ skypilot_nightly-1.0.0.dev20250215.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
+ skypilot_nightly-1.0.0.dev20250215.dist-info/METADATA,sha256=gT-Eg0fwPo_B0dWCRD3V7MuaSB-4ZUtYsrzgcfQUVdQ,18853
+ skypilot_nightly-1.0.0.dev20250215.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ skypilot_nightly-1.0.0.dev20250215.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
+ skypilot_nightly-1.0.0.dev20250215.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
+ skypilot_nightly-1.0.0.dev20250215.dist-info/RECORD,,