skypilot-nightly 1.0.0.dev20250613__py3-none-any.whl → 1.0.0.dev20250614__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sky/__init__.py +4 -2
- sky/adaptors/hyperbolic.py +8 -0
- sky/authentication.py +20 -2
- sky/backends/backend_utils.py +3 -1
- sky/backends/cloud_vm_ray_backend.py +2 -1
- sky/catalog/data_fetchers/fetch_hyperbolic.py +136 -0
- sky/catalog/hyperbolic_catalog.py +133 -0
- sky/clouds/__init__.py +2 -0
- sky/clouds/hyperbolic.py +276 -0
- sky/dashboard/out/404.html +1 -1
- sky/dashboard/out/_next/static/chunks/37-7754056a4b503e1d.js +6 -0
- sky/dashboard/out/_next/static/chunks/600.bd2ed8c076b720ec.js +16 -0
- sky/dashboard/out/_next/static/chunks/{856-0776dc6ed6000c39.js → 856-c2c39c0912285e54.js} +1 -1
- sky/dashboard/out/_next/static/chunks/938-245c9ac4c9e8bf15.js +1 -0
- sky/dashboard/out/_next/static/chunks/{webpack-5c3e6471d04780c6.js → webpack-27de3d9d450d81c6.js} +1 -1
- sky/dashboard/out/_next/static/css/{5d71bfc09f184bab.css → 6f84444b8f3c656c.css} +1 -1
- sky/dashboard/out/_next/static/{UdgJCk2sZFLJgFJW_qiWG → nm5jrKpUZh2W0SxzyDKhz}/_buildManifest.js +1 -1
- sky/dashboard/out/clusters/[cluster]/[job].html +1 -1
- sky/dashboard/out/clusters/[cluster].html +1 -1
- sky/dashboard/out/clusters.html +1 -1
- sky/dashboard/out/config.html +1 -1
- sky/dashboard/out/index.html +1 -1
- sky/dashboard/out/infra/[context].html +1 -1
- sky/dashboard/out/infra.html +1 -1
- sky/dashboard/out/jobs/[job].html +1 -1
- sky/dashboard/out/jobs.html +1 -1
- sky/dashboard/out/users.html +1 -1
- sky/dashboard/out/workspace/new.html +1 -1
- sky/dashboard/out/workspaces/[name].html +1 -1
- sky/dashboard/out/workspaces.html +1 -1
- sky/provision/__init__.py +1 -0
- sky/provision/hyperbolic/__init__.py +11 -0
- sky/provision/hyperbolic/config.py +10 -0
- sky/provision/hyperbolic/instance.py +423 -0
- sky/provision/hyperbolic/utils.py +373 -0
- sky/setup_files/dependencies.py +2 -1
- sky/skylet/constants.py +1 -1
- sky/templates/hyperbolic-ray.yml.j2 +67 -0
- sky/users/permission.py +2 -0
- {skypilot_nightly-1.0.0.dev20250613.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/METADATA +2 -1
- {skypilot_nightly-1.0.0.dev20250613.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/RECORD +50 -41
- sky/dashboard/out/_next/static/chunks/37-d8aebf1683522a0b.js +0 -6
- sky/dashboard/out/_next/static/chunks/600.15a0009177e86b86.js +0 -16
- sky/dashboard/out/_next/static/chunks/938-ab185187a63f9cdb.js +0 -1
- /sky/dashboard/out/_next/static/chunks/{843-6fcc4bf91ac45b39.js → 843-5011affc9540757f.js} +0 -0
- /sky/dashboard/out/_next/static/chunks/pages/{_app-7bbd9d39d6f9a98a.js → _app-664031f6ae737f80.js} +0 -0
- /sky/dashboard/out/_next/static/chunks/pages/clusters/{[cluster]-451a14e7e755ebbc.js → [cluster]-20210f8cd809063d.js} +0 -0
- /sky/dashboard/out/_next/static/chunks/pages/{jobs-fe233baf3d073491.js → jobs-ae7a5e9fa5a5b5f0.js} +0 -0
- /sky/dashboard/out/_next/static/{UdgJCk2sZFLJgFJW_qiWG → nm5jrKpUZh2W0SxzyDKhz}/_ssgManifest.js +0 -0
- {skypilot_nightly-1.0.0.dev20250613.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/WHEEL +0 -0
- {skypilot_nightly-1.0.0.dev20250613.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/entry_points.txt +0 -0
- {skypilot_nightly-1.0.0.dev20250613.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/licenses/LICENSE +0 -0
- {skypilot_nightly-1.0.0.dev20250613.dist-info → skypilot_nightly-1.0.0.dev20250614.dist-info}/top_level.txt +0 -0
sky/__init__.py
CHANGED
@@ -5,7 +5,7 @@ from typing import Optional
 import urllib.request
 
 # Replaced with the current commit when building the wheels.
-_SKYPILOT_COMMIT_SHA = '
+_SKYPILOT_COMMIT_SHA = '660d2bde857362e597fa3c30252693fe4d8c3939'
 
 
 def _get_git_commit():
@@ -35,7 +35,7 @@ def _get_git_commit():
 
 
 __commit__ = _get_git_commit()
-__version__ = '1.0.0.
+__version__ = '1.0.0.dev20250614'
 __root_dir__ = os.path.dirname(os.path.abspath(__file__))
 
 
@@ -144,6 +144,7 @@ Vast = clouds.Vast
 Vsphere = clouds.Vsphere
 Fluidstack = clouds.Fluidstack
 Nebius = clouds.Nebius
+Hyperbolic = clouds.Hyperbolic
 
 __all__ = [
     '__version__',
@@ -163,6 +164,7 @@ __all__ = [
     'Vsphere',
     'Fluidstack',
     'Nebius',
+    'Hyperbolic',
     'Optimizer',
     'OptimizeTarget',
     'backends',
sky/authentication.py
CHANGED
@@ -432,8 +432,8 @@ def setup_kubernetes_authentication(config: Dict[str, Any]) -> Dict[str, Any]:
         # Add message saying "Please check: ~/.sky/config.yaml" to the error
         # message.
         with ux_utils.print_exception_no_traceback():
-            raise ValueError(str(e) +
-
+            raise ValueError(str(e) +
+                             ' Please check: ~/.sky/config.yaml.') from None
     _, public_key_path = get_or_generate_keys()
 
     # Add the user's public key to the SkyPilot cluster.
@@ -567,3 +567,21 @@ def setup_fluidstack_authentication(config: Dict[str, Any]) -> Dict[str, Any]:
     client.get_or_add_ssh_key(public_key)
     config['auth']['ssh_public_key'] = public_key_path
     return configure_ssh_info(config)
+
+
+def setup_hyperbolic_authentication(config: Dict[str, Any]) -> Dict[str, Any]:
+    """Sets up SSH authentication for Hyperbolic."""
+    _, public_key_path = get_or_generate_keys()
+    with open(public_key_path, 'r', encoding='utf-8') as f:
+        public_key = f.read().strip()
+
+    # TODO: adjust below to use public_keys instead of
+    # public_key once backwards-compatibility is no longer required
+    config['publicKey'] = public_key
+
+    # Set up auth section for Ray template
+    config.setdefault('auth', {})
+    config['auth']['ssh_user'] = 'ubuntu'
+    config['auth']['ssh_public_key'] = public_key_path
+
+    return configure_ssh_info(config)
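For orientation, a minimal sketch of what the new helper does to a cluster config dict. The key names come from the diff above; the input dict shape is an assumption, and the key pair itself is managed by get_or_generate_keys().

# Sketch only, not part of the wheel: effect of setup_hyperbolic_authentication.
from sky import authentication as auth

config = {'provider': {}}  # assumed minimal input shape
config = auth.setup_hyperbolic_authentication(config)

# The Hyperbolic API consumes the key material directly via 'publicKey', while
# the Ray template still reads the usual auth section.
assert config['auth']['ssh_user'] == 'ubuntu'
assert 'publicKey' in config and 'ssh_public_key' in config['auth']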
sky/backends/backend_utils.py
CHANGED
@@ -939,6 +939,8 @@ def _add_auth_to_cluster_config(cloud: clouds.Cloud, tmp_yaml_path: str):
         config = auth.setup_vast_authentication(config)
     elif isinstance(cloud, clouds.Fluidstack):
         config = auth.setup_fluidstack_authentication(config)
+    elif isinstance(cloud, clouds.Hyperbolic):
+        config = auth.setup_hyperbolic_authentication(config)
     else:
         assert False, cloud
     common_utils.dump_yaml(tmp_yaml_path, config)
@@ -2508,7 +2510,7 @@ def is_controller_accessible(
             need_connection_check):
         # Check ssh connection if (1) controller is in INIT state, or (2) we failed to fetch the
         # status, both of which can happen when controller's status lock is held by another `sky jobs launch` or
-        # `sky serve up`. If we have
+        # `sky serve up`. If we have controller's head_ip available and it is ssh-reachable,
         # we can allow access to the controller.
         ssh_credentials = ssh_credential_from_yaml(handle.cluster_yaml,
                                                    handle.docker_user,
sky/backends/cloud_vm_ray_backend.py
CHANGED
@@ -196,7 +196,8 @@ def _get_cluster_config_template(cloud):
         clouds.Vsphere: 'vsphere-ray.yml.j2',
         clouds.Vast: 'vast-ray.yml.j2',
         clouds.Fluidstack: 'fluidstack-ray.yml.j2',
-        clouds.Nebius: 'nebius-ray.yml.j2'
+        clouds.Nebius: 'nebius-ray.yml.j2',
+        clouds.Hyperbolic: 'hyperbolic-ray.yml.j2'
     }
     return cloud_to_template[type(cloud)]
 
sky/catalog/data_fetchers/fetch_hyperbolic.py
ADDED
@@ -0,0 +1,136 @@
+"""Script to fetch Hyperbolic instance data and generate catalog."""
+import argparse
+import csv
+import json
+import os
+import sys
+from typing import Any, Dict
+
+import requests
+
+ENDPOINT = 'https://api.hyperbolic.xyz/v2/skypilot/catalog'
+API_KEY_PATH = os.path.expanduser('~/.hyperbolic/api_key')
+
+REQUIRED_FIELDS = [
+    'InstanceType', 'AcceleratorName', 'AcceleratorCount', 'vCPUs', 'MemoryGiB',
+    'StorageGiB', 'Price', 'Region', 'GpuInfo', 'SpotPrice'
+]
+
+
+class HyperbolicCatalogError(Exception):
+    """Base exception for Hyperbolic catalog errors."""
+    pass
+
+
+def get_api_key(api_key=None) -> str:
+    """Get API key from arg, env var, or file."""
+    if api_key:
+        return api_key
+    if api_key := os.environ.get('HYPERBOLIC_API_KEY'):
+        return api_key
+    try:
+        with open(API_KEY_PATH, 'r', encoding='utf-8') as f:
+            return f.read().strip()
+    except FileNotFoundError as exc:
+        raise HyperbolicCatalogError(
+            'No API key found. Please either:\n'
+            '1. Pass --api-key\n'
+            '2. Set HYPERBOLIC_API_KEY environment variable\n'
+            '3. Create ~/.hyperbolic/api_key file') from exc
+
+
+def get_output_path() -> str:
+    """Get output path for catalog file."""
+    current_dir = os.getcwd()
+    if os.path.basename(current_dir) == 'hyperbolic':
+        return 'vms.csv'
+    hyperbolic_dir = os.path.join(current_dir, 'hyperbolic')
+    os.makedirs(hyperbolic_dir, exist_ok=True)
+    return os.path.join(hyperbolic_dir, 'vms.csv')
+
+
+def validate_instance_data(instance: Dict[str, Any]) -> None:
+    """Validate instance data has all required fields."""
+    missing_fields = [
+        field for field in REQUIRED_FIELDS if field not in instance
+    ]
+    if missing_fields:
+        raise HyperbolicCatalogError(
+            f'Instance data missing required fields: {missing_fields}')
+
+
+def create_catalog(api_key=None) -> None:
+    """Generate Hyperbolic catalog CSV file."""
+    try:
+        response = requests.get(
+            ENDPOINT,
+            headers={'Authorization': f'Bearer {get_api_key(api_key)}'},
+            timeout=30)
+        response.raise_for_status()
+
+        try:
+            data = response.json()
+        except json.JSONDecodeError as e:
+            raise HyperbolicCatalogError(
+                f'Invalid JSON response from API: {response.text}') from e
+
+        if 'vms' not in data:
+            raise HyperbolicCatalogError(
+                f'Missing "vms" field in API response: {data}')
+
+        instances = data['vms']
+        if not isinstance(instances, list):
+            raise HyperbolicCatalogError(
+                f'Expected list of instances, got {type(instances)}')
+
+        if not instances:
+            raise HyperbolicCatalogError('No instances found in API response')
+
+        # Validate each instance
+        for instance in instances:
+            validate_instance_data(instance)
+
+    except requests.exceptions.RequestException as e:
+        raise HyperbolicCatalogError(
+            f'Failed to fetch instance data: {e}') from e
+
+    output_path = get_output_path()
+    try:
+        with open(output_path, 'w', newline='', encoding='utf-8') as f:
+            writer = csv.DictWriter(f, fieldnames=REQUIRED_FIELDS)
+            writer.writeheader()
+
+            for instance in instances:
+                entry = instance.copy()
+                # Convert GpuInfo to string format
+                entry['GpuInfo'] = json.dumps(entry['GpuInfo'],
+                                              ensure_ascii=False).replace(
+                                                  '"', "'")  # pylint: disable=invalid-string-quote
+                writer.writerow(entry)
+    except (IOError, OSError) as e:
+        raise HyperbolicCatalogError(
+            f'Failed to write catalog file to {output_path}: {e}') from e
+
+
+def main() -> int:
+    """Main entry point."""
+    parser = argparse.ArgumentParser(
+        description='Fetch Hyperbolic instance data')
+    parser.add_argument('--api-key', help='Hyperbolic API key')
+    args = parser.parse_args()
+
+    try:
+        create_catalog(args.api_key)
+        print(f'Hyperbolic Service Catalog saved to {get_output_path()}')
+        return 0
+    except HyperbolicCatalogError as e:
+        print(f'Error: {e}', file=sys.stderr)
+        return 1
+    except (requests.exceptions.RequestException, json.JSONDecodeError, IOError,
+            OSError) as e:
+        print(f'Unexpected error: {e}', file=sys.stderr)
+        return 1
+
+
+if __name__ == '__main__':
+    sys.exit(main())
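The fetcher is written as a standalone script, but it can also be driven from Python. A usage sketch, assuming the wheel is installed and an API key is available through any of the three lookup paths the script supports:

# Sketch only: drive the new catalog fetcher programmatically.
from sky.catalog.data_fetchers import fetch_hyperbolic

try:
    fetch_hyperbolic.create_catalog()  # GET /v2/skypilot/catalog, then write CSV
    print('catalog written to', fetch_hyperbolic.get_output_path())
except fetch_hyperbolic.HyperbolicCatalogError as e:
    print('fetch failed:', e)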
sky/catalog/hyperbolic_catalog.py
ADDED
@@ -0,0 +1,133 @@
+"""Hyperbolic Cloud service catalog.
+
+This module loads and queries the service catalog for Hyperbolic Cloud.
+"""
+from typing import Dict, List, Optional, Tuple, Union
+
+from sky.catalog import common
+from sky.clouds import cloud  # Import cloud here for Region
+from sky.utils import ux_utils
+
+# Initialize cloud variable at module level
+CLOUD = 'hyperbolic'
+
+_df = common.read_catalog('hyperbolic/vms.csv')
+
+
+def instance_type_exists(instance_type: str) -> bool:
+    return common.instance_type_exists_impl(_df, instance_type)
+
+
+def validate_region_zone(
+        region: Optional[str],
+        zone: Optional[str]) -> Tuple[Optional[str], Optional[str]]:
+    if zone is not None:
+        with ux_utils.print_exception_no_traceback():
+            raise ValueError('Hyperbolic Cloud does not support zones.')
+    return common.validate_region_zone_impl('hyperbolic', _df, region, zone)
+
+
+def get_hourly_cost(
+    instance_type: str,
+    use_spot: bool = False,
+    region: Optional[str] = None,
+    zone: Optional[str] = None,
+) -> float:
+    if zone is not None:
+        with ux_utils.print_exception_no_traceback():
+            raise ValueError('Hyperbolic Cloud does not support zones.')
+    return common.get_hourly_cost_impl(_df, instance_type, use_spot, region,
+                                       zone)
+
+
+def get_vcpus_mem_from_instance_type(
+        instance_type: str,) -> Tuple[Optional[float], Optional[float]]:
+    return common.get_vcpus_mem_from_instance_type_impl(_df, instance_type)
+
+
+def get_accelerators_from_instance_type(
+        instance_type: str) -> Optional[Dict[str, Union[int, float]]]:
+    return common.get_accelerators_from_instance_type_impl(_df, instance_type)
+
+
+def get_vcpus_from_instance_type(instance_type: str) -> Optional[float]:
+    vcpus, _ = get_vcpus_mem_from_instance_type(instance_type)
+    return vcpus
+
+
+def get_memory_from_instance_type(instance_type: str) -> Optional[float]:
+    _, mem = get_vcpus_mem_from_instance_type(instance_type)
+    return mem
+
+
+def get_zone_shell_cmd() -> Optional[str]:
+    """Returns the shell command to obtain the zone."""
+    return None
+
+
+def get_default_instance_type(cpus: Optional[str] = None,
+                              memory: Optional[str] = None,
+                              disk_tier: Optional[str] = None) -> Optional[str]:
+    del disk_tier  # Unused
+    return common.get_instance_type_for_cpus_mem_impl(_df, cpus, memory)
+
+
+def get_instance_type_for_accelerator(
+    acc_name: str,
+    acc_count: int,
+    cpus: Optional[str] = None,
+    memory: Optional[str] = None,
+    use_spot: bool = False,
+    region: Optional[str] = None,
+    zone: Optional[str] = None,
+) -> Tuple[Optional[List[str]], List[str]]:
+    if zone is not None:
+        with ux_utils.print_exception_no_traceback():
+            raise ValueError('Hyperbolic Cloud does not support zones.')
+    return common.get_instance_type_for_accelerator_impl(df=_df,
+                                                         acc_name=acc_name,
+                                                         acc_count=acc_count,
+                                                         cpus=cpus,
+                                                         memory=memory,
+                                                         use_spot=use_spot,
+                                                         region=region,
+                                                         zone=zone)
+
+
+def get_region_zones_for_instance_type(instance_type: str,
+                                       use_spot: bool) -> List[cloud.Region]:
+    df = _df[_df['InstanceType'] == instance_type]
+    return common.get_region_zones(df, use_spot)
+
+
+def get_gen_version(instance_type: str) -> Optional[str]:
+    """Returns the generation version of the instance type."""
+    del instance_type  # Unused
+    # TODO: Implement generation version detection
+    return None
+
+
+def list_accelerators(
+    gpus_only: bool = True,
+    name_filter: Optional[str] = None,
+    region_filter: Optional[str] = None,
+    quantity_filter: Optional[int] = None,
+    case_sensitive: bool = True,
+    all_regions: bool = False,
+    require_price: bool = True,
+) -> Dict[str, List[common.InstanceTypeInfo]]:
+    """Returns all instance types in Hyperbolic Cloud offering accelerators."""
+    del require_price  # Unused
+    return common.list_accelerators_impl('Hyperbolic', _df, gpus_only,
+                                         name_filter, region_filter,
+                                         quantity_filter, case_sensitive,
+                                         all_regions)
+
+
+def get_instance_type_from_catalog() -> dict:
+    # TODO: Implement this function
+    return {}
+
+
+def regions() -> List[cloud.Region]:
+    return [cloud.Region('default')]
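As a rough illustration of how this module is queried once hyperbolic/vms.csv is in place (function names are from the file above; the concrete CPU and memory values are placeholders):

# Sketch only: querying the Hyperbolic catalog accessors.
from sky.catalog import hyperbolic_catalog as hc

inst = hc.get_default_instance_type(cpus='4', memory='16')  # None if no match
if inst is not None:
    print(inst, hc.get_hourly_cost(inst))                # on-demand $/hr from the CSV
    print(hc.get_accelerators_from_instance_type(inst))  # accelerator dict, if any

# Zones are rejected outright; only a single 'default' region is modeled.
print([r.name for r in hc.regions()])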
sky/clouds/__init__.py
CHANGED
@@ -19,6 +19,7 @@ from sky.clouds.cudo import Cudo
 from sky.clouds.do import DO
 from sky.clouds.fluidstack import Fluidstack
 from sky.clouds.gcp import GCP
+from sky.clouds.hyperbolic import Hyperbolic
 from sky.clouds.ibm import IBM
 from sky.clouds.kubernetes import Kubernetes
 from sky.clouds.lambda_cloud import Lambda
@@ -56,6 +57,7 @@ __all__ = [
     'StatusVersion',
     'Fluidstack',
     'Nebius',
+    'Hyperbolic',
     # Utility functions
     'cloud_in_iterable',
 ]
sky/clouds/hyperbolic.py
ADDED
@@ -0,0 +1,276 @@
+"""Hyperbolic Cloud provider implementation
+for SkyPilot.
+"""
+import os
+import typing
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from sky import catalog
+from sky import clouds
+from sky.utils import registry
+from sky.utils import resources_utils
+from sky.utils.resources_utils import DiskTier
+
+if typing.TYPE_CHECKING:
+    from sky import resources as resources_lib
+
+
+@registry.CLOUD_REGISTRY.register
+class Hyperbolic(clouds.Cloud):
+    """Hyperbolic Cloud Provider."""
+
+    _REPR = 'Hyperbolic'
+    name = 'hyperbolic'
+    _MAX_CLUSTER_NAME_LEN_LIMIT = 120
+    API_KEY_PATH = os.path.expanduser('~/.hyperbolic/api_key')
+
+    _CLOUD_UNSUPPORTED_FEATURES = {
+        clouds.CloudImplementationFeatures.STOP: ('Stopping not supported.'),
+        clouds.CloudImplementationFeatures.MULTI_NODE:
+            ('Multi-node not supported.'),
+        clouds.CloudImplementationFeatures.CUSTOM_DISK_TIER:
+            ('Custom disk tiers not supported.'),
+        clouds.CloudImplementationFeatures.STORAGE_MOUNTING:
+            ('Storage mounting not supported.'),
+        clouds.CloudImplementationFeatures.HIGH_AVAILABILITY_CONTROLLERS:
+            ('High availability controllers not supported.'),
+        clouds.CloudImplementationFeatures.SPOT_INSTANCE:
+            ('Spot instances not supported.'),
+        clouds.CloudImplementationFeatures.CLONE_DISK_FROM_CLUSTER:
+            ('Disk cloning not supported.'),
+        clouds.CloudImplementationFeatures.DOCKER_IMAGE:
+            ('Docker images not supported.'),
+        clouds.CloudImplementationFeatures.OPEN_PORTS:
+            ('Opening ports not supported.'),
+        clouds.CloudImplementationFeatures.IMAGE_ID:
+            ('Custom image IDs not supported.'),
+        clouds.CloudImplementationFeatures.CUSTOM_NETWORK_TIER:
+            ('Custom network tiers not supported.'),
+        clouds.CloudImplementationFeatures.HOST_CONTROLLERS:
+            ('Host controllers not supported.'),
+        clouds.CloudImplementationFeatures.AUTO_TERMINATE:
+            ('Auto-termination not supported.'),
+        clouds.CloudImplementationFeatures.AUTOSTOP:
+            ('Auto-stop not supported.'),
+        clouds.CloudImplementationFeatures.AUTODOWN:
+            ('Auto-down not supported.'),
+    }
+
+    PROVISIONER_VERSION = clouds.ProvisionerVersion.SKYPILOT
+    STATUS_VERSION = clouds.StatusVersion.SKYPILOT
+    OPEN_PORTS_VERSION = clouds.OpenPortsVersion.LAUNCH_ONLY
+
+    @classmethod
+    def _unsupported_features_for_resources(
+        cls, resources: 'resources_lib.Resources'
+    ) -> Dict[clouds.CloudImplementationFeatures, str]:
+        del resources
+        return cls._CLOUD_UNSUPPORTED_FEATURES
+
+    @classmethod
+    def _max_cluster_name_length(cls) -> Optional[int]:
+        return cls._MAX_CLUSTER_NAME_LEN_LIMIT
+
+    def instance_type_exists(self, instance_type: str) -> bool:
+        return catalog.instance_type_exists(instance_type, 'hyperbolic')
+
+    @classmethod
+    def regions_with_offering(cls, instance_type: str,
+                              accelerators: Optional[Dict[str, int]],
+                              use_spot: bool, region: Optional[str],
+                              zone: Optional[str]) -> List[clouds.Region]:
+        assert zone is None, 'Hyperbolic does not support zones.'
+        del accelerators, zone  # unused
+
+        regions = catalog.get_region_zones_for_instance_type(
+            instance_type, use_spot, 'hyperbolic')
+        if region is not None:
+            regions = [r for r in regions if r.name == region]
+        return regions
+
+    @classmethod
+    def get_vcpus_mem_from_instance_type(
+            cls, instance_type: str) -> Tuple[Optional[float], Optional[float]]:
+        return catalog.get_vcpus_mem_from_instance_type(instance_type,
+                                                        clouds='hyperbolic')
+
+    def instance_type_to_hourly_cost(self,
+                                     instance_type: str,
+                                     use_spot: bool,
+                                     region: Optional[str] = None,
+                                     zone: Optional[str] = None) -> float:
+        return catalog.get_hourly_cost(instance_type,
+                                       use_spot=use_spot,
+                                       region=region,
+                                       zone=zone,
+                                       clouds='hyperbolic')
+
+    @classmethod
+    def get_default_instance_type(
+            cls,
+            cpus: Optional[str] = None,
+            memory: Optional[str] = None,
+            disk_tier: Optional[DiskTier] = None) -> Optional[str]:
+        return catalog.get_default_instance_type(cpus=cpus,
+                                                 memory=memory,
+                                                 disk_tier=disk_tier,
+                                                 clouds='hyperbolic')
+
+    @classmethod
+    def get_accelerators_from_instance_type(
+            cls, instance_type: str) -> Optional[Dict[str, Union[int, float]]]:
+        return catalog.get_accelerators_from_instance_type(instance_type,
+                                                           clouds='hyperbolic')
+
+    @classmethod
+    def _check_credentials(cls) -> Tuple[bool, Optional[str]]:
+        if os.path.exists(cls.API_KEY_PATH):
+            return True, None
+        return False, f'API key not found at {cls.API_KEY_PATH}'
+
+    @classmethod
+    def _check_compute_credentials(cls) -> Tuple[bool, Optional[str]]:
+        return cls._check_credentials()
+
+    @classmethod
+    def get_credential_file_mounts(cls) -> Dict[str, str]:
+        if os.path.exists(cls.API_KEY_PATH):
+            return {cls.API_KEY_PATH: '~/.hyperbolic/api_key'}
+        return {}
+
+    def __repr__(self):
+        return self._REPR
+
+    def _get_feasible_launchable_resources(
+        self, resources: 'resources_lib.Resources'
+    ) -> 'resources_utils.FeasibleResources':
+        # Check if the instance type exists in the catalog
+        if resources.instance_type is not None:
+            if catalog.instance_type_exists(resources.instance_type,
+                                            'hyperbolic'):
+                # Remove accelerators for launchable resources
+                resources_launch = resources.copy(accelerators=None)
+                return resources_utils.FeasibleResources([resources_launch], [],
+                                                         None)
+            else:
+                raise ValueError(
+                    f'Invalid instance type: {resources.instance_type}')
+
+        # If accelerators are specified
+        accelerators = resources.accelerators
+        if accelerators is not None:
+            assert len(accelerators) == 1, resources
+            acc, acc_count = list(accelerators.items())[0]
+            (instance_list,
+             fuzzy_candidate_list) = catalog.get_instance_type_for_accelerator(
+                 acc,
+                 acc_count,
+                 use_spot=resources.use_spot,
+                 cpus=resources.cpus,
+                 memory=resources.memory,
+                 region=resources.region,
+                 zone=resources.zone,
+                 clouds='hyperbolic')
+            if instance_list is None:
+                return resources_utils.FeasibleResources([],
+                                                         fuzzy_candidate_list,
+                                                         None)
+
+            def _make(instance_list):
+                resource_list = []
+                for instance_type in instance_list:
+                    r = resources.copy(
+                        cloud=self,
+                        instance_type=instance_type,
+                        accelerators=None,
+                        cpus=None,
+                        memory=None,
+                    )
+                    resource_list.append(r)
+                return resource_list
+
+            return resources_utils.FeasibleResources(_make(instance_list),
+                                                     fuzzy_candidate_list, None)
+
+        # If nothing is specified, return a default instance type
+        default_instance_type = self.get_default_instance_type(
+            cpus=resources.cpus,
+            memory=resources.memory,
+            disk_tier=resources.disk_tier)
+        if default_instance_type is None:
+            return resources_utils.FeasibleResources([], [], None)
+        else:
+            r = resources.copy(
+                cloud=self,
+                instance_type=default_instance_type,
+                accelerators=None,
+                cpus=None,
+                memory=None,
+            )
+            return resources_utils.FeasibleResources([r], [], None)
+
+    def validate_region_zone(
+            self, region: Optional[str],
+            zone: Optional[str]) -> Tuple[Optional[str], Optional[str]]:
+        if zone is not None:
+            raise ValueError('Hyperbolic does not support zones.')
+        return catalog.validate_region_zone(region, zone, 'hyperbolic')
+
+    @classmethod
+    def regions(cls) -> List[clouds.Region]:
+        """Returns the list of regions in Hyperbolic's catalog."""
+        return catalog.regions('hyperbolic')
+
+    @classmethod
+    def zones_provision_loop(cls,
+                             *,
+                             region: str,
+                             num_nodes: int,
+                             instance_type: str,
+                             accelerators: Optional[Dict[str, int]] = None,
+                             use_spot: bool = False):
+        yield None
+
+    @classmethod
+    def get_zone_shell_cmd(cls) -> Optional[str]:
+        return None
+
+    def get_egress_cost(self, num_gigabytes: float):
+        return 0.0
+
+    def accelerators_to_hourly_cost(self, accelerators: Dict[str, int],
+                                    use_spot: bool, region: Optional[str],
+                                    zone: Optional[str]) -> float:
+        return 0.0
+
+    def make_deploy_resources_variables(
+            self,
+            resources: 'resources_lib.Resources',
+            cluster_name: resources_utils.ClusterName,
+            region: 'clouds.Region',
+            zones: Optional[List['clouds.Zone']],
+            num_nodes: int,
+            dryrun: bool = False) -> Dict[str, Any]:
+        """Returns a dict of variables for the deployment template."""
+        del dryrun, region, cluster_name  # unused
+        assert zones is None, ('Hyperbolic does not support zones', zones)
+
+        resources = resources.assert_launchable()
+        # resources.accelerators is cleared but .instance_type encodes the info.
+        acc_dict = self.get_accelerators_from_instance_type(
+            resources.instance_type)
+        custom_resources = resources_utils.make_ray_custom_resources_str(
+            acc_dict)
+
+        return {
+            'instance_type': resources.instance_type,
+            'custom_resources': custom_resources,
+            'num_nodes': 1,  # Hyperbolic only supports single-node clusters
+        }
+
+    def cluster_name_in_hint(self, cluster_name_on_cloud: Optional[str],
+                             cluster_name: str) -> bool:
+        """Check if a node's name matches the cluster name pattern."""
+        if cluster_name_on_cloud is None:
+            return False
+        return cluster_name_on_cloud.startswith(cluster_name)
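A rough end-user sketch of what registering this class enables; sky.Task, sky.Resources, and sky.launch are existing SkyPilot APIs, and the accelerator name is only a placeholder:

# Sketch only: targeting the new cloud, subject to the unsupported-features
# table above (single node, no stop/autostop, no spot).
import sky

task = sky.Task(run='nvidia-smi')
task.set_resources(sky.Resources(cloud=sky.Hyperbolic(), accelerators='A100:1'))
# sky.launch(task, cluster_name='hyperbolic-test')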
sky/dashboard/out/404.html
CHANGED
@@ -1 +1 @@
-<!DOCTYPE html><html><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width"/><meta name="next-head-count" content="2"/><link rel="preload" href="/dashboard/_next/static/css/
+<!DOCTYPE html><html><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width"/><meta name="next-head-count" content="2"/><link rel="preload" href="/dashboard/_next/static/css/6f84444b8f3c656c.css" as="style"/><link rel="stylesheet" href="/dashboard/_next/static/css/6f84444b8f3c656c.css" data-n-g=""/><noscript data-n-css=""></noscript><script defer="" nomodule="" src="/dashboard/_next/static/chunks/polyfills-78c92fac7aa8fdd8.js"></script><script src="/dashboard/_next/static/chunks/webpack-27de3d9d450d81c6.js" defer=""></script><script src="/dashboard/_next/static/chunks/framework-87d061ee6ed71b28.js" defer=""></script><script src="/dashboard/_next/static/chunks/main-e0e2335212e72357.js" defer=""></script><script src="/dashboard/_next/static/chunks/pages/_app-664031f6ae737f80.js" defer=""></script><script src="/dashboard/_next/static/chunks/pages/_error-1be831200e60c5c0.js" defer=""></script><script src="/dashboard/_next/static/nm5jrKpUZh2W0SxzyDKhz/_buildManifest.js" defer=""></script><script src="/dashboard/_next/static/nm5jrKpUZh2W0SxzyDKhz/_ssgManifest.js" defer=""></script></head><body><div id="__next"></div><script id="__NEXT_DATA__" type="application/json">{"props":{"pageProps":{"statusCode":404}},"page":"/_error","query":{},"buildId":"nm5jrKpUZh2W0SxzyDKhz","assetPrefix":"/dashboard","nextExport":true,"isFallback":false,"gip":true,"scriptLoader":[]}</script></body></html>