skypilot_nightly-1.0.0.dev20250612-py3-none-any.whl → skypilot_nightly-1.0.0.dev20250614-py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (82)
  1. sky/__init__.py +4 -2
  2. sky/adaptors/hyperbolic.py +8 -0
  3. sky/adaptors/kubernetes.py +3 -2
  4. sky/authentication.py +20 -2
  5. sky/backends/backend_utils.py +11 -3
  6. sky/backends/cloud_vm_ray_backend.py +2 -1
  7. sky/benchmark/benchmark_state.py +2 -1
  8. sky/catalog/data_fetchers/fetch_aws.py +1 -1
  9. sky/catalog/data_fetchers/fetch_hyperbolic.py +136 -0
  10. sky/catalog/data_fetchers/fetch_vast.py +1 -1
  11. sky/catalog/hyperbolic_catalog.py +133 -0
  12. sky/check.py +2 -1
  13. sky/cli.py +1 -1
  14. sky/client/cli.py +1 -1
  15. sky/clouds/__init__.py +2 -0
  16. sky/clouds/cloud.py +1 -1
  17. sky/clouds/gcp.py +1 -1
  18. sky/clouds/hyperbolic.py +276 -0
  19. sky/clouds/kubernetes.py +8 -2
  20. sky/clouds/ssh.py +7 -3
  21. sky/dashboard/out/404.html +1 -1
  22. sky/dashboard/out/_next/static/chunks/37-7754056a4b503e1d.js +6 -0
  23. sky/dashboard/out/_next/static/chunks/600.bd2ed8c076b720ec.js +16 -0
  24. sky/dashboard/out/_next/static/chunks/{856-0776dc6ed6000c39.js → 856-c2c39c0912285e54.js} +1 -1
  25. sky/dashboard/out/_next/static/chunks/938-245c9ac4c9e8bf15.js +1 -0
  26. sky/dashboard/out/_next/static/chunks/{webpack-208a9812ab4f61c9.js → webpack-27de3d9d450d81c6.js} +1 -1
  27. sky/dashboard/out/_next/static/css/{5d71bfc09f184bab.css → 6f84444b8f3c656c.css} +1 -1
  28. sky/dashboard/out/_next/static/{G3DXdMFu2Jzd-Dody9iq1 → nm5jrKpUZh2W0SxzyDKhz}/_buildManifest.js +1 -1
  29. sky/dashboard/out/clusters/[cluster]/[job].html +1 -1
  30. sky/dashboard/out/clusters/[cluster].html +1 -1
  31. sky/dashboard/out/clusters.html +1 -1
  32. sky/dashboard/out/config.html +1 -1
  33. sky/dashboard/out/index.html +1 -1
  34. sky/dashboard/out/infra/[context].html +1 -1
  35. sky/dashboard/out/infra.html +1 -1
  36. sky/dashboard/out/jobs/[job].html +1 -1
  37. sky/dashboard/out/jobs.html +1 -1
  38. sky/dashboard/out/users.html +1 -1
  39. sky/dashboard/out/workspace/new.html +1 -1
  40. sky/dashboard/out/workspaces/[name].html +1 -1
  41. sky/dashboard/out/workspaces.html +1 -1
  42. sky/data/storage.py +2 -2
  43. sky/jobs/state.py +43 -44
  44. sky/provision/__init__.py +1 -0
  45. sky/provision/common.py +1 -1
  46. sky/provision/gcp/config.py +1 -1
  47. sky/provision/hyperbolic/__init__.py +11 -0
  48. sky/provision/hyperbolic/config.py +10 -0
  49. sky/provision/hyperbolic/instance.py +423 -0
  50. sky/provision/hyperbolic/utils.py +373 -0
  51. sky/provision/kubernetes/instance.py +2 -1
  52. sky/provision/kubernetes/utils.py +60 -13
  53. sky/resources.py +2 -2
  54. sky/serve/serve_state.py +81 -15
  55. sky/server/requests/preconditions.py +1 -1
  56. sky/server/requests/requests.py +11 -6
  57. sky/setup_files/dependencies.py +2 -1
  58. sky/skylet/configs.py +26 -19
  59. sky/skylet/constants.py +1 -1
  60. sky/skylet/job_lib.py +3 -5
  61. sky/task.py +1 -1
  62. sky/templates/hyperbolic-ray.yml.j2 +67 -0
  63. sky/templates/kubernetes-ray.yml.j2 +1 -1
  64. sky/users/permission.py +2 -0
  65. sky/utils/common_utils.py +6 -0
  66. sky/utils/context.py +1 -1
  67. sky/utils/infra_utils.py +1 -1
  68. sky/utils/kubernetes/generate_kubeconfig.sh +1 -1
  69. {skypilot_nightly-1.0.0.dev20250612.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/METADATA +2 -1
  70. {skypilot_nightly-1.0.0.dev20250612.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/RECORD +79 -70
  71. sky/dashboard/out/_next/static/chunks/37-d8aebf1683522a0b.js +0 -6
  72. sky/dashboard/out/_next/static/chunks/600.15a0009177e86b86.js +0 -16
  73. sky/dashboard/out/_next/static/chunks/938-ab185187a63f9cdb.js +0 -1
  74. /sky/dashboard/out/_next/static/chunks/{843-6fcc4bf91ac45b39.js → 843-5011affc9540757f.js} +0 -0
  75. /sky/dashboard/out/_next/static/chunks/pages/{_app-7bbd9d39d6f9a98a.js → _app-664031f6ae737f80.js} +0 -0
  76. /sky/dashboard/out/_next/static/chunks/pages/clusters/{[cluster]-451a14e7e755ebbc.js → [cluster]-20210f8cd809063d.js} +0 -0
  77. /sky/dashboard/out/_next/static/chunks/pages/{jobs-fe233baf3d073491.js → jobs-ae7a5e9fa5a5b5f0.js} +0 -0
  78. /sky/dashboard/out/_next/static/{G3DXdMFu2Jzd-Dody9iq1 → nm5jrKpUZh2W0SxzyDKhz}/_ssgManifest.js +0 -0
  79. {skypilot_nightly-1.0.0.dev20250612.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/WHEEL +0 -0
  80. {skypilot_nightly-1.0.0.dev20250612.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/entry_points.txt +0 -0
  81. {skypilot_nightly-1.0.0.dev20250612.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/licenses/LICENSE +0 -0
  82. {skypilot_nightly-1.0.0.dev20250612.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/top_level.txt +0 -0
sky/__init__.py CHANGED
@@ -5,7 +5,7 @@ from typing import Optional
 import urllib.request
 
 # Replaced with the current commit when building the wheels.
-_SKYPILOT_COMMIT_SHA = '88962061c851edfe5dfcfe12a1c3cf63a703dfd8'
+_SKYPILOT_COMMIT_SHA = '660d2bde857362e597fa3c30252693fe4d8c3939'
 
 
 def _get_git_commit():
@@ -35,7 +35,7 @@ def _get_git_commit():
 
 
 __commit__ = _get_git_commit()
-__version__ = '1.0.0.dev20250612'
+__version__ = '1.0.0.dev20250614'
 __root_dir__ = os.path.dirname(os.path.abspath(__file__))
 
 
@@ -144,6 +144,7 @@ Vast = clouds.Vast
 Vsphere = clouds.Vsphere
 Fluidstack = clouds.Fluidstack
 Nebius = clouds.Nebius
+Hyperbolic = clouds.Hyperbolic
 
 __all__ = [
     '__version__',
@@ -163,6 +164,7 @@ __all__ = [
     'Vsphere',
     'Fluidstack',
     'Nebius',
+    'Hyperbolic',
     'Optimizer',
     'OptimizeTarget',
     'backends',
sky/adaptors/hyperbolic.py ADDED
@@ -0,0 +1,8 @@
+"""Hyperbolic cloud adaptor."""
+
+from sky.adaptors import common
+
+hyperbolic = common.LazyImport(
+    'hyperbolic',
+    import_error_message='Failed to import dependencies for Hyperbolic. '
+    'Try running: pip install "skypilot[hyperbolic]"')
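On the pattern above: `common.LazyImport` defers importing the provider SDK until the first attribute access, so `import sky` succeeds even when the optional `hyperbolic` dependency is not installed. A minimal sketch of the general idea (illustrative only, not SkyPilot's actual implementation):

    import importlib

    class LazyImport:
        """Proxy that imports the wrapped module on first attribute access."""

        def __init__(self, module_name, import_error_message=None):
            self._module_name = module_name
            self._import_error_message = import_error_message
            self._module = None

        def __getattr__(self, name):
            # Only called when normal lookup fails, i.e. for forwarded names.
            if self._module is None:
                try:
                    self._module = importlib.import_module(self._module_name)
                except ImportError as e:
                    raise ImportError(
                        self._import_error_message or str(e)) from e
            return getattr(self._module, name)

    # Usage mirroring the adaptor above:
    hyperbolic = LazyImport('hyperbolic',
                            'Try: pip install "skypilot[hyperbolic]"')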
sky/adaptors/kubernetes.py CHANGED
@@ -93,12 +93,13 @@ def _load_config(context: Optional[str] = None):
         context_name = '(current-context)' if context is None else context
         is_ssh_node_pool = False
         if context_name.startswith('ssh-'):
-            context_name = context_name.lstrip('ssh-')
+            context_name = common_utils.removeprefix(context_name, 'ssh-')
             is_ssh_node_pool = True
         # Check if exception was due to no current-context
         if 'Expected key current-context' in str(e):
             if is_ssh_node_pool:
-                context_name = context_name.lstrip('ssh-')
+                context_name = common_utils.removeprefix(
+                    context_name, 'ssh-')
             err_str = ('Failed to load SSH Node Pool configuration for '
                        f'{context_name!r}.\n'
                        '  Run `sky ssh up --infra {context_name}` to '
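The `lstrip('ssh-')` → `common_utils.removeprefix(...)` changes here, and the matching ones in `sky/check.py`, `sky/cli.py`, and `sky/client/cli.py` below, fix a subtle bug: `str.lstrip` strips any leading run of the given characters, not a literal prefix, so context names whose first letters fall in {'s', 'h', '-'} were mangled. A quick REPL illustration (`common_utils.removeprefix` is presumably a compatibility shim for `str.removeprefix`, which requires Python 3.9+):

    >>> 'ssh-hss-cluster'.lstrip('ssh-')        # argument is a character set
    'cluster'                                   # 'hss-' was eaten too
    >>> 'ssh-hss-cluster'.removeprefix('ssh-')  # what the fix emulates
    'hss-cluster'                               # only the literal prefix removed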
sky/authentication.py CHANGED
@@ -432,8 +432,8 @@ def setup_kubernetes_authentication(config: Dict[str, Any]) -> Dict[str, Any]:
         # Add message saying "Please check: ~/.sky/config.yaml" to the error
         # message.
         with ux_utils.print_exception_no_traceback():
-            raise ValueError(str(e) + ' Please check: ~/.sky/config.yaml.') \
-                from None
+            raise ValueError(str(e) +
+                             ' Please check: ~/.sky/config.yaml.') from None
     _, public_key_path = get_or_generate_keys()
 
     # Add the user's public key to the SkyPilot cluster.
@@ -567,3 +567,21 @@ def setup_fluidstack_authentication(config: Dict[str, Any]) -> Dict[str, Any]:
     client.get_or_add_ssh_key(public_key)
     config['auth']['ssh_public_key'] = public_key_path
     return configure_ssh_info(config)
+
+
+def setup_hyperbolic_authentication(config: Dict[str, Any]) -> Dict[str, Any]:
+    """Sets up SSH authentication for Hyperbolic."""
+    _, public_key_path = get_or_generate_keys()
+    with open(public_key_path, 'r', encoding='utf-8') as f:
+        public_key = f.read().strip()
+
+    # TODO: adjust below to use public_keys instead of
+    # public_key once backwards-compatibility is no longer required
+    config['publicKey'] = public_key
+
+    # Set up auth section for Ray template
+    config.setdefault('auth', {})
+    config['auth']['ssh_user'] = 'ubuntu'
+    config['auth']['ssh_public_key'] = public_key_path
+
+    return configure_ssh_info(config)
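For a concrete picture of what `setup_hyperbolic_authentication` does to the node config, a hedged sketch with placeholder key material (the real path and key come from `get_or_generate_keys()`):

    # Placeholders standing in for get_or_generate_keys() output:
    public_key_path = '~/.ssh/sky-key.pub'      # hypothetical path
    public_key = 'ssh-ed25519 AAAA...example'   # hypothetical key contents

    config = {}
    config['publicKey'] = public_key            # key contents, sent to Hyperbolic
    config.setdefault('auth', {})
    config['auth']['ssh_user'] = 'ubuntu'       # login user on Hyperbolic instances
    config['auth']['ssh_public_key'] = public_key_path  # path, for the Ray template
    print(config)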
sky/backends/backend_utils.py CHANGED
@@ -126,7 +126,7 @@ _ENDPOINTS_RETRY_MESSAGE = ('If the cluster was recently started, '
                             'please retry after a while.')
 
 # If a cluster is less than LAUNCH_DOUBLE_CHECK_WINDOW seconds old, and we don't
-# see any instances in the cloud, the instances might be in the proccess of
+# see any instances in the cloud, the instances might be in the process of
 # being created. We will wait LAUNCH_DOUBLE_CHECK_DELAY seconds and then double
 # check to make sure there are still no instances. LAUNCH_DOUBLE_CHECK_DELAY
 # should be set longer than the delay between (sending the create instance
@@ -341,7 +341,13 @@ def path_size_megabytes(path: str) -> int:
                      f'{git_exclude_filter} --dry-run {path!r}')
     rsync_output = ''
     try:
-        rsync_output = str(subprocess.check_output(rsync_command, shell=True))
+        # rsync sometimes fails `--dry-run` for MacOS' rsync build, however this function is only used to display
+        # a warning message to the user if the size of a file/directory is too
+        # large, so we can safely ignore the error.
+        rsync_output = str(
+            subprocess.check_output(rsync_command,
+                                    shell=True,
+                                    stderr=subprocess.DEVNULL))
     except subprocess.CalledProcessError:
         logger.debug('Command failed, proceeding without estimating size: '
                      f'{rsync_command}')
@@ -933,6 +939,8 @@ def _add_auth_to_cluster_config(cloud: clouds.Cloud, tmp_yaml_path: str):
         config = auth.setup_vast_authentication(config)
     elif isinstance(cloud, clouds.Fluidstack):
         config = auth.setup_fluidstack_authentication(config)
+    elif isinstance(cloud, clouds.Hyperbolic):
+        config = auth.setup_hyperbolic_authentication(config)
     else:
         assert False, cloud
     common_utils.dump_yaml(tmp_yaml_path, config)
@@ -2502,7 +2510,7 @@ def is_controller_accessible(
             need_connection_check):
         # Check ssh connection if (1) controller is in INIT state, or (2) we failed to fetch the
         # status, both of which can happen when controller's status lock is held by another `sky jobs launch` or
-        # `sky serve up`. If we have controller's head_ip available and it is ssh-reachable,
+        # `sky serve up`. If we have controller's head_ip available and it is ssh-reachable,
         # we can allow access to the controller.
         ssh_credentials = ssh_credential_from_yaml(handle.cluster_yaml,
                                                    handle.docker_user,
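On the `path_size_megabytes` hunk above: the added `stderr=subprocess.DEVNULL` matters because macOS ships an rsync build whose `--dry-run` can fail, and the output only feeds a size warning, so noise and failures are deliberately swallowed. The same best-effort pattern in isolation (hypothetical `du` probe standing in for the actual rsync command, assuming a POSIX shell):

    import subprocess

    def best_effort_size_probe(path: str) -> str:
        """Return raw size output, or '' if the probe fails; never raises."""
        command = f'du -sk {path!r}'  # stand-in for the rsync --dry-run call
        try:
            # Suppress stderr: a noisy or failing probe should not surface
            # to the user, since the result only feeds a warning message.
            return str(
                subprocess.check_output(command,
                                        shell=True,
                                        stderr=subprocess.DEVNULL))
        except subprocess.CalledProcessError:
            return ''  # proceed without an estimate

    print(best_effort_size_probe('/tmp'))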
sky/backends/cloud_vm_ray_backend.py CHANGED
@@ -196,7 +196,8 @@ def _get_cluster_config_template(cloud):
         clouds.Vsphere: 'vsphere-ray.yml.j2',
         clouds.Vast: 'vast-ray.yml.j2',
         clouds.Fluidstack: 'fluidstack-ray.yml.j2',
-        clouds.Nebius: 'nebius-ray.yml.j2'
+        clouds.Nebius: 'nebius-ray.yml.j2',
+        clouds.Hyperbolic: 'hyperbolic-ray.yml.j2'
     }
     return cloud_to_template[type(cloud)]
 
sky/benchmark/benchmark_state.py CHANGED
@@ -17,7 +17,6 @@ _BENCHMARK_BUCKET_NAME_KEY = 'bucket_name'
 _BENCHMARK_BUCKET_TYPE_KEY = 'bucket_type'
 
 _BENCHMARK_DB_PATH = os.path.expanduser('~/.sky/benchmark.db')
-os.makedirs(pathlib.Path(_BENCHMARK_DB_PATH).parents[0], exist_ok=True)
 
 
 class _BenchmarkSQLiteConn(threading.local):
@@ -80,6 +79,8 @@ def _init_db(func):
         return func(*args, **kwargs)
     with _benchmark_db_init_lock:
         if not _BENCHMARK_DB:
+            os.makedirs(pathlib.Path(_BENCHMARK_DB_PATH).parents[0],
+                        exist_ok=True)
             _BENCHMARK_DB = _BenchmarkSQLiteConn()
     return func(*args, **kwargs)
 
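These two hunks move directory creation from import time into the lock-guarded first use of the DB, so importing the module no longer touches the filesystem. The shape of that lazy-initialization pattern, as a standalone sketch (names simplified from the diff):

    import os
    import pathlib
    import threading

    _DB_PATH = os.path.expanduser('~/.sky/benchmark.db')
    _DB = None
    _DB_INIT_LOCK = threading.Lock()

    def _get_db():
        """Create the DB directory and connection only on first use."""
        global _DB
        if _DB is None:
            with _DB_INIT_LOCK:
                if _DB is None:  # double-checked under the lock
                    os.makedirs(pathlib.Path(_DB_PATH).parents[0],
                                exist_ok=True)
                    _DB = object()  # stand-in for the real SQLite connection
        return _DB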
sky/catalog/data_fetchers/fetch_aws.py CHANGED
@@ -277,7 +277,7 @@ def _get_instance_types_df(region: str) -> Union[str, 'pd.DataFrame']:
         try:
             return float(row['VCpuInfo']['DefaultVCpus'])
         except Exception as e:  # pylint: disable=broad-except
-            print('Error occured for row:', row)
+            print('Error occurred for row:', row)
             print('Error:', e)
             raise
 
sky/catalog/data_fetchers/fetch_hyperbolic.py ADDED
@@ -0,0 +1,136 @@
+"""Script to fetch Hyperbolic instance data and generate catalog."""
+import argparse
+import csv
+import json
+import os
+import sys
+from typing import Any, Dict
+
+import requests
+
+ENDPOINT = 'https://api.hyperbolic.xyz/v2/skypilot/catalog'
+API_KEY_PATH = os.path.expanduser('~/.hyperbolic/api_key')
+
+REQUIRED_FIELDS = [
+    'InstanceType', 'AcceleratorName', 'AcceleratorCount', 'vCPUs', 'MemoryGiB',
+    'StorageGiB', 'Price', 'Region', 'GpuInfo', 'SpotPrice'
+]
+
+
+class HyperbolicCatalogError(Exception):
+    """Base exception for Hyperbolic catalog errors."""
+    pass
+
+
+def get_api_key(api_key=None) -> str:
+    """Get API key from arg, env var, or file."""
+    if api_key:
+        return api_key
+    if api_key := os.environ.get('HYPERBOLIC_API_KEY'):
+        return api_key
+    try:
+        with open(API_KEY_PATH, 'r', encoding='utf-8') as f:
+            return f.read().strip()
+    except FileNotFoundError as exc:
+        raise HyperbolicCatalogError(
+            'No API key found. Please either:\n'
+            '1. Pass --api-key\n'
+            '2. Set HYPERBOLIC_API_KEY environment variable\n'
+            '3. Create ~/.hyperbolic/api_key file') from exc
+
+
+def get_output_path() -> str:
+    """Get output path for catalog file."""
+    current_dir = os.getcwd()
+    if os.path.basename(current_dir) == 'hyperbolic':
+        return 'vms.csv'
+    hyperbolic_dir = os.path.join(current_dir, 'hyperbolic')
+    os.makedirs(hyperbolic_dir, exist_ok=True)
+    return os.path.join(hyperbolic_dir, 'vms.csv')
+
+
+def validate_instance_data(instance: Dict[str, Any]) -> None:
+    """Validate instance data has all required fields."""
+    missing_fields = [
+        field for field in REQUIRED_FIELDS if field not in instance
+    ]
+    if missing_fields:
+        raise HyperbolicCatalogError(
+            f'Instance data missing required fields: {missing_fields}')
+
+
+def create_catalog(api_key=None) -> None:
+    """Generate Hyperbolic catalog CSV file."""
+    try:
+        response = requests.get(
+            ENDPOINT,
+            headers={'Authorization': f'Bearer {get_api_key(api_key)}'},
+            timeout=30)
+        response.raise_for_status()
+
+        try:
+            data = response.json()
+        except json.JSONDecodeError as e:
+            raise HyperbolicCatalogError(
+                f'Invalid JSON response from API: {response.text}') from e
+
+        if 'vms' not in data:
+            raise HyperbolicCatalogError(
+                f'Missing "vms" field in API response: {data}')
+
+        instances = data['vms']
+        if not isinstance(instances, list):
+            raise HyperbolicCatalogError(
+                f'Expected list of instances, got {type(instances)}')
+
+        if not instances:
+            raise HyperbolicCatalogError('No instances found in API response')
+
+        # Validate each instance
+        for instance in instances:
+            validate_instance_data(instance)
+
+    except requests.exceptions.RequestException as e:
+        raise HyperbolicCatalogError(
+            f'Failed to fetch instance data: {e}') from e
+
+    output_path = get_output_path()
+    try:
+        with open(output_path, 'w', newline='', encoding='utf-8') as f:
+            writer = csv.DictWriter(f, fieldnames=REQUIRED_FIELDS)
+            writer.writeheader()
+
+            for instance in instances:
+                entry = instance.copy()
+                # Convert GpuInfo to string format
+                entry['GpuInfo'] = json.dumps(entry['GpuInfo'],
+                                              ensure_ascii=False).replace(
+                                                  '"', "'")  # pylint: disable=invalid-string-quote
+                writer.writerow(entry)
+    except (IOError, OSError) as e:
+        raise HyperbolicCatalogError(
+            f'Failed to write catalog file to {output_path}: {e}') from e
+
+
+def main() -> int:
+    """Main entry point."""
+    parser = argparse.ArgumentParser(
+        description='Fetch Hyperbolic instance data')
+    parser.add_argument('--api-key', help='Hyperbolic API key')
+    args = parser.parse_args()
+
+    try:
+        create_catalog(args.api_key)
+        print(f'Hyperbolic Service Catalog saved to {get_output_path()}')
+        return 0
+    except HyperbolicCatalogError as e:
+        print(f'Error: {e}', file=sys.stderr)
+        return 1
+    except (requests.exceptions.RequestException, json.JSONDecodeError, IOError,
+            OSError) as e:
+        print(f'Unexpected error: {e}', file=sys.stderr)
+        return 1
+
+
+if __name__ == '__main__':
+    sys.exit(main())
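Once this fetcher has run, the generated catalog is a plain CSV whose header comes from `REQUIRED_FIELDS`; it can be spot-checked with the standard library (path per `get_output_path()` when run outside a `hyperbolic/` directory):

    import csv

    with open('hyperbolic/vms.csv', newline='', encoding='utf-8') as f:
        for row in csv.DictReader(f):
            # Every row carries the REQUIRED_FIELDS keys.
            print(row['InstanceType'], row['AcceleratorName'], row['Price'])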
sky/catalog/data_fetchers/fetch_vast.py CHANGED
@@ -2,7 +2,7 @@
 
 #
 # Due to the design of the sdk, pylint has a false
-# positive for the fnctions.
+# positive for the functions.
 #
 # pylint: disable=assignment-from-no-return
 import collections
sky/catalog/hyperbolic_catalog.py ADDED
@@ -0,0 +1,133 @@
+"""Hyperbolic Cloud service catalog.
+
+This module loads and queries the service catalog for Hyperbolic Cloud.
+"""
+from typing import Dict, List, Optional, Tuple, Union
+
+from sky.catalog import common
+from sky.clouds import cloud  # Import cloud here for Region
+from sky.utils import ux_utils
+
+# Initialize cloud variable at module level
+CLOUD = 'hyperbolic'
+
+_df = common.read_catalog('hyperbolic/vms.csv')
+
+
+def instance_type_exists(instance_type: str) -> bool:
+    return common.instance_type_exists_impl(_df, instance_type)
+
+
+def validate_region_zone(
+        region: Optional[str],
+        zone: Optional[str]) -> Tuple[Optional[str], Optional[str]]:
+    if zone is not None:
+        with ux_utils.print_exception_no_traceback():
+            raise ValueError('Hyperbolic Cloud does not support zones.')
+    return common.validate_region_zone_impl('hyperbolic', _df, region, zone)
+
+
+def get_hourly_cost(
+    instance_type: str,
+    use_spot: bool = False,
+    region: Optional[str] = None,
+    zone: Optional[str] = None,
+) -> float:
+    if zone is not None:
+        with ux_utils.print_exception_no_traceback():
+            raise ValueError('Hyperbolic Cloud does not support zones.')
+    return common.get_hourly_cost_impl(_df, instance_type, use_spot, region,
+                                       zone)
+
+
+def get_vcpus_mem_from_instance_type(
+        instance_type: str,) -> Tuple[Optional[float], Optional[float]]:
+    return common.get_vcpus_mem_from_instance_type_impl(_df, instance_type)
+
+
+def get_accelerators_from_instance_type(
+        instance_type: str) -> Optional[Dict[str, Union[int, float]]]:
+    return common.get_accelerators_from_instance_type_impl(_df, instance_type)
+
+
+def get_vcpus_from_instance_type(instance_type: str) -> Optional[float]:
+    vcpus, _ = get_vcpus_mem_from_instance_type(instance_type)
+    return vcpus
+
+
+def get_memory_from_instance_type(instance_type: str) -> Optional[float]:
+    _, mem = get_vcpus_mem_from_instance_type(instance_type)
+    return mem
+
+
+def get_zone_shell_cmd() -> Optional[str]:
+    """Returns the shell command to obtain the zone."""
+    return None
+
+
+def get_default_instance_type(cpus: Optional[str] = None,
+                              memory: Optional[str] = None,
+                              disk_tier: Optional[str] = None) -> Optional[str]:
+    del disk_tier  # Unused
+    return common.get_instance_type_for_cpus_mem_impl(_df, cpus, memory)
+
+
+def get_instance_type_for_accelerator(
+    acc_name: str,
+    acc_count: int,
+    cpus: Optional[str] = None,
+    memory: Optional[str] = None,
+    use_spot: bool = False,
+    region: Optional[str] = None,
+    zone: Optional[str] = None,
+) -> Tuple[Optional[List[str]], List[str]]:
+    if zone is not None:
+        with ux_utils.print_exception_no_traceback():
+            raise ValueError('Hyperbolic Cloud does not support zones.')
+    return common.get_instance_type_for_accelerator_impl(df=_df,
+                                                         acc_name=acc_name,
+                                                         acc_count=acc_count,
+                                                         cpus=cpus,
+                                                         memory=memory,
+                                                         use_spot=use_spot,
+                                                         region=region,
+                                                         zone=zone)
+
+
+def get_region_zones_for_instance_type(instance_type: str,
+                                       use_spot: bool) -> List[cloud.Region]:
+    df = _df[_df['InstanceType'] == instance_type]
+    return common.get_region_zones(df, use_spot)
+
+
+def get_gen_version(instance_type: str) -> Optional[str]:
+    """Returns the generation version of the instance type."""
+    del instance_type  # Unused
+    # TODO: Implement generation version detection
+    return None
+
+
+def list_accelerators(
+    gpus_only: bool = True,
+    name_filter: Optional[str] = None,
+    region_filter: Optional[str] = None,
+    quantity_filter: Optional[int] = None,
+    case_sensitive: bool = True,
+    all_regions: bool = False,
+    require_price: bool = True,
+) -> Dict[str, List[common.InstanceTypeInfo]]:
+    """Returns all instance types in Hyperbolic Cloud offering accelerators."""
+    del require_price  # Unused
+    return common.list_accelerators_impl('Hyperbolic', _df, gpus_only,
+                                         name_filter, region_filter,
+                                         quantity_filter, case_sensitive,
+                                         all_regions)
+
+
+def get_instance_type_from_catalog() -> dict:
+    # TODO: Implement this function
+    return {}
+
+
+def regions() -> List[cloud.Region]:
+    return [cloud.Region('default')]
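A hedged usage sketch for this module, assuming a catalog has been fetched so `common.read_catalog('hyperbolic/vms.csv')` succeeds; the instance-type string is hypothetical:

    from sky.catalog import hyperbolic_catalog as catalog

    # Enumerate GPU offerings known to the catalog.
    for acc_name, offerings in catalog.list_accelerators(gpus_only=True).items():
        print(acc_name, len(offerings))

    # Price a specific instance type (Hyperbolic has no zones).
    instance_type = '1x-H100'  # hypothetical name; take one from vms.csv
    if catalog.instance_type_exists(instance_type):
        print(catalog.get_hourly_cost(instance_type, use_spot=False))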
sky/check.py CHANGED
@@ -16,6 +16,7 @@ from sky import skypilot_config
 from sky.adaptors import cloudflare
 from sky.clouds import cloud as sky_cloud
 from sky.skylet import constants
+from sky.utils import common_utils
 from sky.utils import registry
 from sky.utils import rich_utils
 from sky.utils import subprocess_utils
@@ -561,7 +562,7 @@ def _format_context_details(cloud: Union[str, sky_clouds.Cloud],
             # TODO: This is a hack to remove the 'ssh-' prefix from the
             # context name. Once we have a separate kubeconfig for SSH,
             # this will not be required.
-            cleaned_context = context.lstrip('ssh-')
+            cleaned_context = common_utils.removeprefix(context, 'ssh-')
         else:
             cleaned_context = context
         symbol = (ux_utils.INDENT_LAST_SYMBOL if i == len(filtered_contexts) -
sky/cli.py CHANGED
@@ -3613,7 +3613,7 @@ def show_gpus(
             if not _filter_ctx(ctx):
                 continue
             if is_ssh:
-                display_ctx = ctx.lstrip('ssh-')
+                display_ctx = common_utils.removeprefix(ctx, 'ssh-')
             else:
                 display_ctx = ctx
             num_filtered_contexts += 1
sky/client/cli.py CHANGED
@@ -3613,7 +3613,7 @@ def show_gpus(
             if not _filter_ctx(ctx):
                 continue
             if is_ssh:
-                display_ctx = ctx.lstrip('ssh-')
+                display_ctx = common_utils.removeprefix(ctx, 'ssh-')
             else:
                 display_ctx = ctx
             num_filtered_contexts += 1
sky/clouds/__init__.py CHANGED
@@ -19,6 +19,7 @@ from sky.clouds.cudo import Cudo
 from sky.clouds.do import DO
 from sky.clouds.fluidstack import Fluidstack
 from sky.clouds.gcp import GCP
+from sky.clouds.hyperbolic import Hyperbolic
 from sky.clouds.ibm import IBM
 from sky.clouds.kubernetes import Kubernetes
 from sky.clouds.lambda_cloud import Lambda
@@ -56,6 +57,7 @@ __all__ = [
     'StatusVersion',
     'Fluidstack',
     'Nebius',
+    'Hyperbolic',
     # Utility functions
     'cloud_in_iterable',
 ]
sky/clouds/cloud.py CHANGED
@@ -631,7 +631,7 @@ class Cloud:
 
     def need_cleanup_after_preemption_or_failure(
             self, resources: 'resources_lib.Resources') -> bool:
-        """Whether a resource needs cleanup after preeemption or failure.
+        """Whether a resource needs cleanup after preemption or failure.
 
         In most cases, spot resources do not need cleanup after preemption,
         as long as the cluster can be relaunched with the same name and tag,
sky/clouds/gcp.py CHANGED
@@ -1062,7 +1062,7 @@ class GCP(clouds.Cloud):
 
     def need_cleanup_after_preemption_or_failure(
            self, resources: 'resources.Resources') -> bool:
-        """Whether a resource needs cleanup after preeemption or failure."""
+        """Whether a resource needs cleanup after preemption or failure."""
         # Spot TPU VMs require manual cleanup after preemption.
         # "If your Cloud TPU is preempted,
         # you must delete it and create a new one ..."