skypilot-nightly 1.0.0.dev20250716__py3-none-any.whl → 1.0.0.dev20250718__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sky/__init__.py +4 -2
- sky/backends/backend.py +8 -4
- sky/backends/cloud_vm_ray_backend.py +50 -1
- sky/backends/docker_utils.py +1 -1
- sky/backends/local_docker_backend.py +2 -1
- sky/catalog/common.py +60 -50
- sky/catalog/data_fetchers/fetch_gcp.py +1 -0
- sky/catalog/gcp_catalog.py +24 -7
- sky/catalog/kubernetes_catalog.py +5 -1
- sky/client/cli/command.py +180 -77
- sky/client/cli/git.py +549 -0
- sky/client/common.py +1 -1
- sky/client/sdk.py +1 -1
- sky/clouds/gcp.py +1 -1
- sky/dashboard/out/404.html +1 -1
- sky/dashboard/out/_next/static/{gVXjeFhvtWXyOsx9xYNvM → FUjweqdImyeYhMYFON-Se}/_buildManifest.js +1 -1
- sky/dashboard/out/_next/static/chunks/1043-734e57d2b27dfe5d.js +1 -0
- sky/dashboard/out/_next/static/chunks/4869.bdd42f14b51d1d6f.js +16 -0
- sky/dashboard/out/_next/static/chunks/8969-8e0b2055bf5dd499.js +1 -0
- sky/dashboard/out/_next/static/chunks/{9984.b56614f3c4c5961d.js → 9984.2b5e3fa69171bff9.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/clusters/[cluster]/[job]-fa406155b4223d0d.js +11 -0
- sky/dashboard/out/_next/static/chunks/pages/jobs/{[job]-14d404b7dd28502a.js → [job]-c5b357bfd9502fbe.js} +1 -1
- sky/dashboard/out/_next/static/chunks/webpack-6b0575ea521af4f3.js +1 -0
- sky/dashboard/out/clusters/[cluster]/[job].html +1 -1
- sky/dashboard/out/clusters/[cluster].html +1 -1
- sky/dashboard/out/clusters.html +1 -1
- sky/dashboard/out/config.html +1 -1
- sky/dashboard/out/index.html +1 -1
- sky/dashboard/out/infra/[context].html +1 -1
- sky/dashboard/out/infra.html +1 -1
- sky/dashboard/out/jobs/[job].html +1 -1
- sky/dashboard/out/jobs.html +1 -1
- sky/dashboard/out/users.html +1 -1
- sky/dashboard/out/volumes.html +1 -1
- sky/dashboard/out/workspace/new.html +1 -1
- sky/dashboard/out/workspaces/[name].html +1 -1
- sky/dashboard/out/workspaces.html +1 -1
- sky/exceptions.py +5 -0
- sky/execution.py +1 -1
- sky/jobs/client/sdk.py +1 -1
- sky/jobs/server/core.py +14 -0
- sky/provision/kubernetes/utils.py +6 -0
- sky/serve/client/sdk.py +1 -1
- sky/server/common.py +8 -3
- sky/server/rest.py +71 -26
- sky/setup_files/MANIFEST.in +1 -0
- sky/setup_files/dependencies.py +2 -0
- sky/task.py +12 -2
- sky/utils/command_runner.py +144 -35
- sky/utils/controller_utils.py +4 -3
- sky/utils/git.py +9 -0
- sky/utils/git_clone.sh +460 -0
- sky/utils/schemas.py +15 -1
- {skypilot_nightly-1.0.0.dev20250716.dist-info → skypilot_nightly-1.0.0.dev20250718.dist-info}/METADATA +3 -1
- {skypilot_nightly-1.0.0.dev20250716.dist-info → skypilot_nightly-1.0.0.dev20250718.dist-info}/RECORD +60 -57
- sky/dashboard/out/_next/static/chunks/1043-90a88c46f27b3df5.js +0 -1
- sky/dashboard/out/_next/static/chunks/4869.c139c0124e677fc8.js +0 -16
- sky/dashboard/out/_next/static/chunks/8969-743abf4bc86baf48.js +0 -1
- sky/dashboard/out/_next/static/chunks/pages/clusters/[cluster]/[job]-9096ea50b8e2cf9e.js +0 -6
- sky/dashboard/out/_next/static/chunks/webpack-3fad5d4a0541a02d.js +0 -1
- /sky/dashboard/out/_next/static/{gVXjeFhvtWXyOsx9xYNvM → FUjweqdImyeYhMYFON-Se}/_ssgManifest.js +0 -0
- {skypilot_nightly-1.0.0.dev20250716.dist-info → skypilot_nightly-1.0.0.dev20250718.dist-info}/WHEEL +0 -0
- {skypilot_nightly-1.0.0.dev20250716.dist-info → skypilot_nightly-1.0.0.dev20250718.dist-info}/entry_points.txt +0 -0
- {skypilot_nightly-1.0.0.dev20250716.dist-info → skypilot_nightly-1.0.0.dev20250718.dist-info}/licenses/LICENSE +0 -0
- {skypilot_nightly-1.0.0.dev20250716.dist-info → skypilot_nightly-1.0.0.dev20250718.dist-info}/top_level.txt +0 -0
sky/client/cli/git.py
ADDED
@@ -0,0 +1,549 @@
+"""Git utilities for SkyPilot."""
+import enum
+import os
+import re
+from typing import List, Optional, Union
+
+import git
+import requests
+
+from sky import exceptions
+from sky import sky_logging
+
+logger = sky_logging.init_logger(__name__)
+
+
+class GitRefType(enum.Enum):
+    """Type of git reference."""
+
+    BRANCH = 'branch'
+    TAG = 'tag'
+    COMMIT = 'commit'
+
+
+class GitUrlInfo:
+    """Information extracted from a git URL."""
+
+    def __init__(self,
+                 host: str,
+                 path: str,
+                 protocol: str,
+                 user: Optional[str] = None,
+                 port: Optional[int] = None):
+        self.host = host
+        # Repository path (e.g., 'user/repo' or 'org/subgroup/repo').
+        # The path is the part after the host.
+        self.path = path
+        # 'https', 'ssh'
+        self.protocol = protocol
+        # SSH username
+        self.user = user
+        self.port = port
+
+
+class GitCloneInfo:
+    """Information about a git clone."""
+
+    def __init__(self,
+                 url: str,
+                 envs: Optional[dict] = None,
+                 token: Optional[str] = None,
+                 ssh_key: Optional[str] = None):
+        self.url = url
+        self.envs = envs
+        self.token = token
+        self.ssh_key = ssh_key
+
+
+class GitRepo:
+    """Git utilities for SkyPilot."""
+
+    def __init__(self,
+                 repo_url: str,
+                 ref: str = 'main',
+                 git_token: Optional[str] = None,
+                 git_ssh_key_path: Optional[str] = None):
+        """Initialize Git utility.
+
+        Args:
+            repo_url: Git repository URL.
+            ref: Git reference (branch, tag, or commit hash).
+            git_token: GitHub token for private repositories.
+            git_ssh_key_path: Path to SSH private key for authentication.
+        """
+        self.repo_url = repo_url
+        self.ref = ref
+        self.git_token = git_token
+        self.git_ssh_key_path = git_ssh_key_path
+
+        # Parse URL during initialization to catch format errors early
+        self._parsed_url = self._parse_git_url(self.repo_url)
+
+    def _parse_git_url(self, url: str) -> GitUrlInfo:
+        """Parse git URL into components.
+
+        Supports various git URL formats:
+        - HTTPS: https://github.com/user/repo.git
+        - SSH: git@github.com:user/repo.git (SCP-like)
+        - SSH full: ssh://git@github.com/user/repo.git
+        - SSH with port: ssh://git@github.com:2222/user/repo.git
+
+        Args:
+            url: Git repository URL in any supported format.
+
+        Returns:
+            GitUrlInfo with parsed components.
+
+        Raises:
+            exceptions.GitError: If URL format is not supported.
+        """
+        # Remove trailing .git if present
+        clean_url = url.rstrip('/')
+        if clean_url.endswith('.git'):
+            clean_url = clean_url[:-4]
+
+        # Pattern for HTTPS/HTTP URLs
+        https_pattern = r'^(https?)://(?:([^@]+)@)?([^:/]+)(?::(\d+))?/(.+)$'
+        https_match = re.match(https_pattern, clean_url)
+
+        if https_match:
+            protocol, user, host, port_str, path = https_match.groups()
+            port = int(port_str) if port_str else None
+
+            # Validate that path is not empty
+            if not path or path == '/':
+                raise exceptions.GitError(
+                    f'Invalid repository path in URL: {url}')
+
+            return GitUrlInfo(host=host,
+                              path=path,
+                              protocol=protocol,
+                              user=user,
+                              port=port)
+
+        # Pattern for SSH URLs (full format)
+        ssh_full_pattern = r'^ssh://(?:([^@]+)@)?([^:/]+)(?::(\d+))?/(.+)$'
+        ssh_full_match = re.match(ssh_full_pattern, clean_url)
+
+        if ssh_full_match:
+            user, host, port_str, path = ssh_full_match.groups()
+            port = int(port_str) if port_str else None
+
+            # Validate that path is not empty
+            if not path or path == '/':
+                raise exceptions.GitError(
+                    f'Invalid repository path in SSH URL: {url}')
+
+            return GitUrlInfo(host=host,
+                              path=path,
+                              protocol='ssh',
+                              user=user,
+                              port=port)
+
+        # Pattern for SSH SCP-like format (exclude URLs with ://)
+        scp_pattern = r'^(?:([^@]+)@)?([^:/]+):(.+)$'
+        scp_match = re.match(scp_pattern, clean_url)
+
+        # Make sure it's not a URL with protocol (should not contain ://)
+        if scp_match and '://' not in clean_url:
+            user, host, path = scp_match.groups()
+
+            # Validate that path is not empty
+            if not path:
+                raise exceptions.GitError(
+                    f'Invalid repository path in SSH URL: {url}')
+
+            return GitUrlInfo(host=host,
+                              path=path,
+                              protocol='ssh',
+                              user=user,
+                              port=None)
+
+        raise exceptions.GitError(
+            f'Unsupported git URL format: {url}. '
+            'Supported formats: https://host/owner/repo, '
+            'ssh://user@host/owner/repo, user@host:owner/repo')
+
+    def get_https_url(self, with_token: bool = False) -> str:
+        """Get HTTPS URL for the repository.
+
+        Args:
+            with_token: If True, includes token in URL for authentication
+
+        Returns:
+            HTTPS URL string.
+        """
+        port_str = f':{self._parsed_url.port}' if self._parsed_url.port else ''
+        path = self._parsed_url.path
+        # Remove .git suffix if present (but not individual characters)
+        if path.endswith('.git'):
+            path = path[:-4]
+
+        if with_token and self.git_token:
+            return f'https://{self.git_token}@{self._parsed_url.host}' \
+                   f'{port_str}/{path}.git'
+        return f'https://{self._parsed_url.host}{port_str}/{path}.git'
+
+    def get_ssh_url(self) -> str:
+        """Get SSH URL for the repository in full format.
+
+        Returns:
+            SSH URL string in full format.
+        """
+        # Use original user from URL, or default to 'git'
+        ssh_user = self._parsed_url.user or 'git'
+        port_str = f':{self._parsed_url.port}' if self._parsed_url.port else ''
+        path = self._parsed_url.path
+        # Remove .git suffix if present (but not individual characters)
+        if path.endswith('.git'):
+            path = path[:-4]
+        return f'ssh://{ssh_user}@{self._parsed_url.host}{port_str}/{path}.git'
+
+    def get_repo_clone_info(self) -> GitCloneInfo:
+        """Validate the repository access with comprehensive authentication
+        and return the appropriate clone info.
+
+        This method implements a sequential validation approach:
+        1. Try public access (no authentication)
+        2. If has token and URL is https, try token access
+        3. If URL is ssh, try ssh access with user provided ssh key or
+           default ssh credential
+
+        Returns:
+            GitCloneInfo instance with successful access method.
+
+        Raises:
+            exceptions.GitError: If the git URL format is invalid or
+                the repository cannot be accessed.
+        """
+        logger.debug(f'Validating access to {self._parsed_url.host}'
+                     f'/{self._parsed_url.path}')
+
+        # Step 1: Try public access first (most common case)
+        try:
+            https_url = self.get_https_url()
+            logger.debug(f'Trying public HTTPS access to {https_url}')
+
+            # Use /info/refs endpoint to check public access.
+            # This is more reliable than git ls-remote as it doesn't
+            # use local git config.
+            stripped_url = https_url.rstrip('/')
+            info_refs_url = f'{stripped_url}/info/refs?service=git-upload-pack'
+
+            # Make a simple HTTP request without any authentication
+            response = requests.get(
+                info_refs_url,
+                timeout=10,
+                allow_redirects=True,
+                # Ensure no local credentials are used
+                auth=None)
+
+            if response.status_code == 200:
+                logger.info(
+                    f'Successfully validated repository {https_url} access '
+                    'using public access')
+                return GitCloneInfo(url=https_url)
+        except Exception as e:  # pylint: disable=broad-except
+            logger.debug(f'Public access failed: {str(e)}')
+
+        # Step 2: Try with token if provided
+        if self.git_token and self._parsed_url.protocol == 'https':
+            try:
+                https_url = self.get_https_url()
+                auth_url = self.get_https_url(with_token=True)
+                logger.debug(f'Trying token authentication to {https_url}')
+                git_cmd = git.cmd.Git()
+                git_cmd.ls_remote(auth_url)
+                logger.info(
+                    f'Successfully validated repository {https_url} access '
+                    'using token authentication')
+                return GitCloneInfo(url=https_url, token=self.git_token)
+            except Exception as e:
+                logger.info(f'Token access failed: {str(e)}')
+                raise exceptions.GitError(
+                    f'Failed to access repository {self.repo_url} using token '
+                    'authentication. Please verify your token and repository '
+                    f'access permissions. Original error: {str(e)}') from e
+
+        # Step 3: Try SSH access with available keys
+        if self._parsed_url.protocol == 'ssh':
+            try:
+                ssh_url = self.get_ssh_url()
+
+                # Get SSH key info using the combined method
+                ssh_key_info = self._get_ssh_key_info()
+
+                if ssh_key_info:
+                    key_path, key_content = ssh_key_info
+                    git_ssh_command = f'ssh -F none -i {key_path} ' \
+                                      '-o StrictHostKeyChecking=no ' \
+                                      '-o UserKnownHostsFile=/dev/null ' \
+                                      '-o IdentitiesOnly=yes'
+                    ssh_env = {'GIT_SSH_COMMAND': git_ssh_command}
+
+                    logger.debug(f'Trying SSH authentication to {ssh_url} '
+                                 f'with {key_path}')
+                    git_cmd = git.cmd.Git()
+                    git_cmd.update_environment(**ssh_env)
+                    git_cmd.ls_remote(ssh_url)
+                    logger.info(
+                        f'Successfully validated repository {ssh_url} access '
+                        f'using SSH key: {key_path}')
+                    return GitCloneInfo(url=ssh_url,
+                                        ssh_key=key_content,
+                                        envs=ssh_env)
+                else:
+                    raise exceptions.GitError(
+                        f'No SSH keys found for {self.repo_url}.')
+            except Exception as e:  # pylint: disable=broad-except
+                raise exceptions.GitError(
+                    f'Failed to access repository {self.repo_url} using '
+                    'SSH key authentication. Please verify your SSH key and '
+                    'repository access permissions. '
+                    f'Original error: {str(e)}') from e
+
+        # If we get here, no authentication methods are available
+        raise exceptions.GitError(
+            f'Failed to access repository {self.repo_url}. '
+            'If this is a private repository, please provide authentication'
+            f' using either: GIT_TOKEN for token-based access, or'
+            f' GIT_SSH_KEY_PATH for SSH access.')
+
+    def _parse_ssh_config(self) -> Optional[str]:
+        """Parse SSH config file to find IdentityFile for the target host.
+
+        Returns:
+            Path to SSH private key specified in config, or None if not found.
+        """
+        ssh_config_path = os.path.expanduser('~/.ssh/config')
+        if not os.path.exists(ssh_config_path):
+            logger.debug('SSH config file ~/.ssh/config does not exist')
+            return None
+
+        try:
+            # Try to use paramiko's SSH config parser if available
+            try:
+                import paramiko  # pylint: disable=import-outside-toplevel
+                ssh_config = paramiko.SSHConfig()
+                with open(ssh_config_path, 'r', encoding='utf-8') as f:
+                    ssh_config.parse(f)
+                # Get config for the target host
+                host_config = ssh_config.lookup(self._parsed_url.host)
+
+                # Look for identity files in the config
+                identity_files: Union[str, List[str]] = host_config.get(
+                    'identityfile', [])
+                if not isinstance(identity_files, list):
+                    identity_files = [identity_files]
+
+                # Find the first existing identity file
+                for identity_file in identity_files:
+                    key_path = os.path.expanduser(identity_file)
+                    if os.path.exists(key_path):
+                        logger.debug(f'Found SSH key in config for '
+                                     f'{self._parsed_url.host}: {key_path}')
+                        return key_path
+
+                logger.debug(f'No valid SSH keys found in config for host: '
+                             f'{self._parsed_url.host}')
+                return None
+
+            except ImportError:
+                logger.debug('paramiko not available')
+                return None
+
+        except Exception as e:  # pylint: disable=broad-except
+            logger.debug(f'Error parsing SSH config: {str(e)}')
+            return None
+
+    def _get_ssh_key_info(self) -> Optional[tuple]:
+        """Get SSH key path and content using comprehensive strategy.
+
+        Strategy:
+        1. Check provided git_ssh_key_path if given
+        2. Check SSH config for host-specific IdentityFile
+        3. Search for common SSH key types in ~/.ssh/ directory
+
+        Returns:
+            Tuple of (key_path, key_content) if found, None otherwise.
+        """
+        # Step 1: Check provided SSH key path first
+        if self.git_ssh_key_path:
+            try:
+                key_path = os.path.expanduser(self.git_ssh_key_path)
+
+                # Validate SSH key before using it
+                if not os.path.exists(key_path):
+                    raise exceptions.GitError(
+                        f'SSH key not found at path: {self.git_ssh_key_path}')
+
+                # Check key permissions
+                key_stat = os.stat(key_path)
+                if key_stat.st_mode & 0o077:
+                    logger.warning(
+                        f'SSH key {key_path} has too open permissions. '
+                        f'Recommended: chmod 600 {key_path}')
+
+                # Check if it's a valid private key and read content
+                with open(key_path, 'r', encoding='utf-8') as f:
+                    key_content = f.read()
+                    if not (key_content.startswith('-----BEGIN') and
+                            'PRIVATE KEY' in key_content):
+                        raise exceptions.GitError(
+                            f'SSH key {key_path} is invalid.')
+
+                logger.debug(f'Using provided SSH key: {key_path}')
+                return (key_path, key_content)
+            except Exception as e:  # pylint: disable=broad-except
+                raise exceptions.GitError(
+                    f'Validate provided SSH key error: {str(e)}') from e
+
+        # Step 2: Check SSH config for host-specific configuration
+        config_key_path = self._parse_ssh_config()
+        if config_key_path:
+            try:
+                with open(config_key_path, 'r', encoding='utf-8') as f:
+                    key_content = f.read()
+                logger.debug(f'Using SSH key from config: {config_key_path}')
+                return (config_key_path, key_content)
+            except Exception as e:  # pylint: disable=broad-except
+                logger.debug(f'Could not read SSH key: {str(e)}')
+
+        # Step 3: Search for default SSH keys
+        ssh_dir = os.path.expanduser('~/.ssh')
+        if not os.path.exists(ssh_dir):
+            logger.debug('SSH directory ~/.ssh does not exist')
+            return None
+
+        # Common SSH key file names in order of preference
+        key_candidates = [
+            'id_rsa',  # Most common
+            'id_ed25519',  # Modern, recommended
+        ]
+
+        for key_name in key_candidates:
+            private_key_path = os.path.join(ssh_dir, key_name)
+
+            # Check if both private and public keys exist
+            if not os.path.exists(private_key_path):
+                continue
+
+            # Check private key permissions
+            try:
+                key_stat = os.stat(private_key_path)
+                if key_stat.st_mode & 0o077:
+                    logger.warning(
+                        f'SSH key {private_key_path} has too open permissions. '
+                        f'Consider: chmod 600 {private_key_path}')
+
+                # Validate private key format and read content
+                with open(private_key_path, 'r', encoding='utf-8') as f:
+                    key_content = f.read()
+                    if not (key_content.startswith('-----BEGIN') and
+                            'PRIVATE KEY' in key_content):
+                        logger.debug(f'SSH key {private_key_path} is invalid.')
+                        continue
+
+                logger.debug(f'Discovered default SSH key: {private_key_path}')
+                return (private_key_path, key_content)
+
+            except Exception as e:  # pylint: disable=broad-except
+                logger.debug(
+                    f'Error checking SSH key {private_key_path}: {str(e)}')
+                continue
+
+        logger.debug('No suitable SSH keys found')
+        return None
+
+    def get_ref_type(self) -> GitRefType:
+        """Get the type of the reference.
+
+        Returns:
+            GitRefType.COMMIT if it's a commit hash,
+            GitRefType.BRANCH if it's a branch,
+            GitRefType.TAG if it's a tag.
+
+        Raises:
+            exceptions.GitError: If the reference is invalid.
+        """
+        clone_info = self.get_repo_clone_info()
+        git_cmd = git.cmd.Git()
+        if clone_info.envs:
+            git_cmd.update_environment(**clone_info.envs)
+
+        try:
+            # Get all remote refs
+            refs = git_cmd.ls_remote(clone_info.url).split('\n')
+
+            # Collect all commit hashes from refs
+            all_commit_hashes = set()
+
+            # Check if it's a branch or tag name
+            for ref in refs:
+                if not ref:
+                    continue
+                hash_val, ref_name = ref.split('\t')
+
+                # Store the commit hash for later validation
+                all_commit_hashes.add(hash_val)
+
+                # Check if it's a branch
+                if ref_name.startswith(
+                        'refs/heads/') and ref_name[11:] == self.ref:
+                    return GitRefType.BRANCH
+
+                # Check if it's a tag
+                if ref_name.startswith(
+                        'refs/tags/') and ref_name[10:] == self.ref:
+                    return GitRefType.TAG
+
+            # If we get here, it's not a branch or tag name
+            # Check if it looks like a commit hash (hex string)
+            if len(self.ref) >= 4 and all(
+                    c in '0123456789abcdef' for c in self.ref.lower()):
+                # First check if it's a complete match with any known commit
+                if self.ref in all_commit_hashes:
+                    logger.debug(f'Found exact commit hash match: {self.ref}')
+                    return GitRefType.COMMIT
+
+                # Check if it's a prefix match with any known commit
+                matching_commits = [
+                    h for h in all_commit_hashes if h.startswith(self.ref)
+                ]
+                if len(matching_commits) == 1:
+                    logger.debug(
+                        f'Found commit hash prefix match: {self.ref} -> '
+                        f'{matching_commits[0]}')
+                    return GitRefType.COMMIT
+                elif len(matching_commits) > 1:
+                    # Multiple matches - ambiguous
+                    raise exceptions.GitError(
+                        f'Ambiguous commit hash {self.ref!r}. '
+                        f'Multiple commits match: '
+                        f'{", ".join(matching_commits[:5])}...')
+
+                # If no match found in ls-remote output, we can't verify
+                # the commit exists. This could be a valid commit that's
+                # not at the tip of any branch/tag. We'll assume it's valid
+                # if it looks like a commit hash and let git handle validation
+                # during clone.
+                logger.debug(f'Commit hash not found in ls-remote output, '
+                             f'assuming valid: {self.ref}')
+                logger.warning(
+                    f'Cannot verify commit {self.ref} exists - it may be a '
+                    'commit in history not at any branch/tag tip')
+                return GitRefType.COMMIT
+
+            # If it's not a branch, tag, or hex string, it's invalid
+            raise exceptions.GitError(
+                f'Git reference {self.ref!r} not found. '
+                'Please provide a valid branch, tag, or commit hash.')
+
+        except git.exc.GitCommandError as e:
+            if not (self.git_token or self.git_ssh_key_path):
+                raise exceptions.GitError(
+                    'Failed to check repository. If this is a private '
+                    'repository, please provide authentication using either '
+                    'GIT_TOKEN or GIT_SSH_KEY_PATH.') from e
+            raise exceptions.GitError(
+                f'Failed to check git reference: {str(e)}') from e
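
For orientation, a minimal sketch of how the new GitRepo helper can be driven, using only the methods defined above (the repository URL, ref, and token here are hypothetical):

    from sky.client.cli.git import GitRepo

    # Hypothetical private repository; GIT_TOKEN / GIT_SSH_KEY_PATH are the
    # environment variables named in the error messages above.
    repo = GitRepo(repo_url='https://github.com/acme/private-repo.git',
                   ref='v1.2.0',
                   git_token='<token>')

    # Tries anonymous HTTPS first, then token auth (HTTPS URLs) or SSH keys
    # (SSH URLs), returning the URL and credentials that worked.
    clone_info = repo.get_repo_clone_info()
    print(clone_info.url)

    # Classifies the ref as a branch, tag, or commit via `git ls-remote`.
    print(repo.get_ref_type())  # e.g. GitRefType.TAG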
sky/client/common.py
CHANGED
@@ -272,7 +272,7 @@ def upload_mounts_to_api_server(dag: 'sky.Dag',
     upload_list = []
     for task_ in dag.tasks:
         task_.file_mounts_mapping = {}
-        if task_.workdir:
+        if task_.workdir and isinstance(task_.workdir, str):
             workdir = task_.workdir
             assert os.path.isabs(workdir)
             upload_list.append(workdir)
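
The widened guard reflects that workdir is no longer necessarily a string in this release (see the sky/utils/schemas.py and sky/client/cli/git.py entries above): only plain-path workdirs are uploaded to the API server. A hedged illustration of the two shapes (the dict keys are an assumption inferred from the new git utilities, not confirmed by this diff):

    # Local-path workdir: a str, uploaded to the API server as before.
    task.workdir = '/abs/path/to/project'

    # Git-based workdir: assumed dict form, cloned remotely rather than
    # uploaded, hence skipped by the isinstance check above.
    task.workdir = {'url': 'https://github.com/acme/repo.git', 'ref': 'main'}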
sky/client/sdk.py
CHANGED
@@ -716,7 +716,7 @@ def exec( # pylint: disable=redefined-builtin
 @usage_lib.entrypoint
 @server_common.check_server_healthy_or_start
 @annotations.client_api
-@rest.
+@rest.retry_transient_errors()
 def tail_logs(cluster_name: str,
               job_id: Optional[int],
               follow: bool,
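
sky/server/rest.py (+71/-26 in the file list) supplies this decorator; its implementation is not shown in this diff, but a generic retry-on-transient-errors decorator has roughly the following shape (an illustrative sketch, not SkyPilot's actual code):

    import functools
    import time

    def retry_transient_errors(max_retries: int = 3, backoff: float = 1.0):
        """Sketch: retry a callable when a transient error is raised."""
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                delay = backoff
                for attempt in range(max_retries + 1):
                    try:
                        return func(*args, **kwargs)
                    except ConnectionError:  # stand-in for transient errors
                        if attempt == max_retries:
                            raise
                        time.sleep(delay)
                        delay *= 2  # exponential backoff
            return wrapper
        return decorator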
sky/clouds/gcp.py
CHANGED
@@ -1174,7 +1174,7 @@ class GCP(clouds.Cloud):
         # These series don't support pd-standard, use pd-balanced for LOW.
         _propagate_disk_type(
             lowest=tier2name[resources_utils.DiskTier.MEDIUM])
-        if instance_type.startswith('a3-ultragpu'):
+        if instance_type.startswith('a3-ultragpu') or series == 'n4':
             # a3-ultragpu instances only support hyperdisk-balanced.
             _propagate_disk_type(all='hyperdisk-balanced')
 
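For context: like a3-ultragpu, GCP's N4 series supports only Hyperdisk (Hyperdisk Balanced), not pd-standard or pd-balanced, which is presumably why the n4 check is folded into the same hyperdisk-balanced propagation.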
sky/dashboard/out/404.html
CHANGED
@@ -1 +1 @@
-<!DOCTYPE html><html><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width"/><meta name="next-head-count" content="2"/><link rel="preload" href="/dashboard/_next/static/css/219887b94512388c.css" as="style"/><link rel="stylesheet" href="/dashboard/_next/static/css/219887b94512388c.css" data-n-g=""/><noscript data-n-css=""></noscript><script defer="" nomodule="" src="/dashboard/_next/static/chunks/polyfills-78c92fac7aa8fdd8.js"></script><script src="/dashboard/_next/static/chunks/webpack-
+<!DOCTYPE html><html><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width"/><meta name="next-head-count" content="2"/><link rel="preload" href="/dashboard/_next/static/css/219887b94512388c.css" as="style"/><link rel="stylesheet" href="/dashboard/_next/static/css/219887b94512388c.css" data-n-g=""/><noscript data-n-css=""></noscript><script defer="" nomodule="" src="/dashboard/_next/static/chunks/polyfills-78c92fac7aa8fdd8.js"></script><script src="/dashboard/_next/static/chunks/webpack-6b0575ea521af4f3.js" defer=""></script><script src="/dashboard/_next/static/chunks/framework-efc06c2733009cd3.js" defer=""></script><script src="/dashboard/_next/static/chunks/main-c0a4f1ea606d48d2.js" defer=""></script><script src="/dashboard/_next/static/chunks/pages/_app-771a40cde532309b.js" defer=""></script><script src="/dashboard/_next/static/chunks/pages/_error-c72a1f77a3c0be1b.js" defer=""></script><script src="/dashboard/_next/static/FUjweqdImyeYhMYFON-Se/_buildManifest.js" defer=""></script><script src="/dashboard/_next/static/FUjweqdImyeYhMYFON-Se/_ssgManifest.js" defer=""></script></head><body><div id="__next"></div><script id="__NEXT_DATA__" type="application/json">{"props":{"pageProps":{"statusCode":404}},"page":"/_error","query":{},"buildId":"FUjweqdImyeYhMYFON-Se","assetPrefix":"/dashboard","nextExport":true,"isFallback":false,"gip":true,"scriptLoader":[]}</script></body></html>
sky/dashboard/out/_next/static/{gVXjeFhvtWXyOsx9xYNvM → FUjweqdImyeYhMYFON-Se}/_buildManifest.js
RENAMED
@@ -1 +1 @@
-self.__BUILD_MANIFEST=function(s,c,a,e,t,f,u,n,r,i,
+self.__BUILD_MANIFEST=function(s,c,a,e,t,f,u,n,r,b,i,j,d,k,o){return{__rewrites:{afterFiles:[],beforeFiles:[],fallback:[]},"/":["static/chunks/pages/index-927ddeebe57a8ac3.js"],"/_error":["static/chunks/pages/_error-c72a1f77a3c0be1b.js"],"/clusters":["static/chunks/pages/clusters-102d169e87913ba1.js"],"/clusters/[cluster]":[s,c,a,e,t,r,i,f,u,n,j,b,d,k,o,"static/chunks/1871-76491ac174a95278.js","static/chunks/pages/clusters/[cluster]-0c37ee1ac5f3474d.js"],"/clusters/[cluster]/[job]":[s,c,a,e,t,f,u,n,"static/chunks/pages/clusters/[cluster]/[job]-fa406155b4223d0d.js"],"/config":["static/chunks/pages/config-a2673b256b6d416f.js"],"/infra":["static/chunks/pages/infra-ae9d2f705ce582c9.js"],"/infra/[context]":["static/chunks/pages/infra/[context]-8b0809f59034d509.js"],"/jobs":["static/chunks/pages/jobs-5bbdc71878f0a068.js"],"/jobs/[job]":[s,c,a,e,t,r,f,u,n,b,"static/chunks/pages/jobs/[job]-c5b357bfd9502fbe.js"],"/users":["static/chunks/pages/users-19e98664bdd61643.js"],"/volumes":["static/chunks/pages/volumes-61ea7ba7e56f8d06.js"],"/workspace/new":["static/chunks/pages/workspace/new-5629d4e551dba1ee.js"],"/workspaces":["static/chunks/pages/workspaces-a1e43d9ef51a9cea.js"],"/workspaces/[name]":[s,c,a,e,t,r,i,f,u,n,j,b,d,k,o,"static/chunks/1141-d8c6404a7c6fffe6.js","static/chunks/pages/workspaces/[name]-7c0187f43757a548.js"],sortedPages:["/","/_app","/_error","/clusters","/clusters/[cluster]","/clusters/[cluster]/[job]","/config","/infra","/infra/[context]","/jobs","/jobs/[job]","/users","/volumes","/workspace/new","/workspaces","/workspaces/[name]"]}}("static/chunks/616-162f3033ffcd3d31.js","static/chunks/5230-df791914b54d91d9.js","static/chunks/5739-5ea3ffa10fc884f2.js","static/chunks/1664-d65361e92b85e786.js","static/chunks/804-9f5e98ce84d46bdd.js","static/chunks/6989-eab0e9c16b64fd9f.js","static/chunks/3698-9fa11dafb5cad4a6.js","static/chunks/9470-b6f6a35283863a6f.js","static/chunks/1272-1ef0bf0237faccdb.js","static/chunks/8969-8e0b2055bf5dd499.js","static/chunks/3947-b059261d6fa88a1f.js","static/chunks/6990-dcb411b566e64cde.js","static/chunks/1043-734e57d2b27dfe5d.js","static/chunks/6601-d4a381403a8bae91.js","static/chunks/938-6a9ffdaa21eee969.js"),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();
sky/dashboard/out/_next/static/chunks/1043-734e57d2b27dfe5d.js
ADDED
@@ -0,0 +1 @@
+"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[1043],{326:function(e,t,a){a.d(t,{$N:function(){return p},Be:function(){return _},Vq:function(){return c},cN:function(){return f},cZ:function(){return d},fK:function(){return g}});var r=a(5893),s=a(7294),o=a(6327),n=a(2350),l=a(3767);let c=o.fC;o.xz;let u=o.h_;o.x8;let i=s.forwardRef((e,t)=>{let{className:a,...s}=e;return(0,r.jsx)(o.aV,{ref:t,className:(0,n.cn)("fixed inset-0 z-50 bg-black/50 backdrop-blur-sm data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0",a),...s})});i.displayName=o.aV.displayName;let d=s.forwardRef((e,t)=>{let{className:a,children:s,...c}=e;return(0,r.jsxs)(u,{children:[(0,r.jsx)(i,{}),(0,r.jsxs)(o.VY,{ref:t,className:(0,n.cn)("fixed left-[50%] top-[50%] z-50 grid w-full max-w-lg translate-x-[-50%] translate-y-[-50%] gap-4 border border-gray-200 bg-white p-6 shadow-lg duration-200 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] sm:rounded-lg",a),...c,children:[s,(0,r.jsxs)(o.x8,{className:"absolute right-4 top-4 rounded-sm opacity-70 ring-offset-white transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-gray-400 focus:ring-offset-2 disabled:pointer-events-none data-[state=open]:bg-gray-100 data-[state=open]:text-gray-500",children:[(0,r.jsx)(l.Z,{className:"h-4 w-4"}),(0,r.jsx)("span",{className:"sr-only",children:"Close"})]})]})]})});d.displayName=o.VY.displayName;let g=e=>{let{className:t,...a}=e;return(0,r.jsx)("div",{className:(0,n.cn)("flex flex-col space-y-1.5 text-center sm:text-left",t),...a})};g.displayName="DialogHeader";let f=e=>{let{className:t,...a}=e;return(0,r.jsx)("div",{className:(0,n.cn)("flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2",t),...a})};f.displayName="DialogFooter";let p=s.forwardRef((e,t)=>{let{className:a,...s}=e;return(0,r.jsx)(o.Dx,{ref:t,className:(0,n.cn)("text-lg font-semibold leading-none tracking-tight",a),...s})});p.displayName=o.Dx.displayName;let _=s.forwardRef((e,t)=>{let{className:a,...s}=e;return(0,r.jsx)(o.dk,{ref:t,className:(0,n.cn)("text-sm text-gray-500",a),...s})});_.displayName=o.dk.displayName},3266:function(e,t,a){a.d(t,{QL:function(){return g},Sl:function(){return i},getClusters:function(){return c},uR:function(){return u}});var r=a(7294),s=a(5821),o=a(7145),n=a(6378);let l={UP:"RUNNING",STOPPED:"STOPPED",INIT:"LAUNCHING",null:"TERMINATED"};async function c(){let{clusterNames:e=null}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};try{return(await o.x.fetch("/status",{cluster_names:e,all_users:!0})).map(e=>{let t="",a=t=e.zone?e.zone:e.region;return t&&t.length>25&&(t=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:15;if(!e||e.length<=t)return e;if(t<=3)return"...";let a=Math.floor((t-3)/2),r=a+(t-3)%2;return 0===a?e.substring(0,r)+"...":e.substring(0,r)+"..."+e.substring(e.length-a)}(t,25)),{status:l[e.status],cluster:e.name,user:e.user_name,user_hash:e.user_hash,cloud:e.cloud,region:e.region,infra:t?e.cloud+" ("+t+")":e.cloud,full_infra:a?"".concat(e.cloud," (").concat(a,")"):e.cloud,cpus:e.cpus,mem:e.memory,gpus:e.accelerators,resources_str:e.resources_str,resources_str_full:e.resources_str_full,time:new Date(1e3*e.launched_at),num_nodes:e.nodes,workspace:e.workspace,autostop:e.autostop,to_down:e.to_down,jobs:[],command:e.last_creation_command||e.last_use,task_yaml:e.last_creation_yaml||"{}",events:[{time:new Date(1e3*e.launched_at),event:"Cluster created."}]}})}catch(e){return console.error("Error fetching clusters:",e),[]}}async function u(){try{let e=await o.x.fetch("/cost_report",{days:30});console.log("Raw cluster history data:",e);let t=e.map(e=>{let t="Unknown";e.cloud?t=e.cloud:e.resources&&e.resources.cloud&&(t=e.resources.cloud);let a=e.user_name||"-";return{status:e.status?l[e.status]:"TERMINATED",cluster:e.name,user:a,user_hash:e.user_hash,cloud:t,region:"",infra:t,full_infra:t,resources_str:e.resources_str,resources_str_full:e.resources_str_full,time:e.launched_at?new Date(1e3*e.launched_at):null,num_nodes:e.num_nodes||1,duration:e.duration,total_cost:e.total_cost,workspace:e.workspace||"default",autostop:-1,to_down:!1,cluster_hash:e.cluster_hash,usage_intervals:e.usage_intervals,command:e.last_creation_command||"",task_yaml:e.last_creation_yaml||"{}",events:[{time:e.launched_at?new Date(1e3*e.launched_at):new Date,event:"Cluster created."}]}});return console.log("Processed cluster history data:",t),t}catch(e){return console.error("Error fetching cluster history:",e),[]}}async function i(e){let{clusterName:t,jobId:a,onNewLog:r,workspace:n}=e;try{await o.x.stream("/logs",{follow:!1,cluster_name:t,job_id:a,override_skypilot_config:{active_workspace:n||"default"}},r)}catch(e){console.error("Error in streamClusterJobLogs:",e),(0,s.C)("Error in streamClusterJobLogs: ".concat(e.message),"error")}}async function d(e){let{clusterName:t,workspace:a}=e;try{return(await o.x.fetch("/queue",{cluster_name:t,all_users:!0,override_skypilot_config:{active_workspace:a}})).map(e=>{var r;let s=e.end_at?e.end_at:Date.now()/1e3,o=0,n=0;return e.submitted_at&&(o=s-e.submitted_at),e.start_at&&(n=s-e.start_at),{id:e.job_id,status:e.status,job:e.job_name,user:e.username,user_hash:e.user_hash,gpus:e.accelerators||{},submitted_at:e.submitted_at?new Date(1e3*e.submitted_at):null,resources:e.resources,cluster:t,total_duration:o,job_duration:n,infra:"",logs:"",workspace:a||"default",git_commit:(null===(r=e.metadata)||void 0===r?void 0:r.git_commit)||"-"}})}catch(e){return console.error("Error fetching cluster jobs:",e),[]}}function g(e){let{cluster:t,job:a=null}=e,[s,o]=(0,r.useState)(null),[l,u]=(0,r.useState)(null),[i,g]=(0,r.useState)(!0),[f,p]=(0,r.useState)(!0),_=(0,r.useCallback)(async()=>{if(t)try{g(!0);let e=await n.default.get(c,[{clusterNames:[t]}]);return o(e[0]),e[0]}catch(e){console.error("Error fetching cluster data:",e)}finally{g(!1)}return null},[t]),m=(0,r.useCallback)(async e=>{if(t)try{p(!0);let a=await n.default.get(d,[{clusterName:t,workspace:e||"default"}]);u(a)}catch(e){console.error("Error fetching cluster job data:",e)}finally{p(!1)}},[t]),h=(0,r.useCallback)(async()=>{n.default.invalidate(c,[{clusterNames:[t]}]);let e=await _();e&&(n.default.invalidate(d,[{clusterName:t,workspace:e.workspace||"default"}]),await m(e.workspace))},[_,m,t]),b=(0,r.useCallback)(async()=>{s&&(n.default.invalidate(d,[{clusterName:t,workspace:s.workspace||"default"}]),await m(s.workspace))},[m,s,t]);return(0,r.useEffect)(()=>{(async()=>{let e=await _();e&&m(e.workspace)})()},[t,a,_,m]),{clusterData:s,clusterJobData:l,loading:i,clusterDetailsLoading:i,clusterJobsLoading:f,refreshData:h,refreshClusterJobsOnly:b}}},2045:function(e,t,a){a.d(t,{l4:function(){return n}});var r=a(3225),s=a(7145);async function o(e,t){try{let a=[];try{let e=await s.x.get("/enabled_clouds"),t=e.headers.get("X-Skypilot-Request-ID")||e.headers.get("X-Request-ID"),r=await s.x.get("/api/get?request_id=".concat(t)),o=await r.json();a=o.return_value?JSON.parse(o.return_value):[],console.log("Enabled clouds:",a)}catch(e){console.error("Error fetching enabled clouds:",e),a=[]}let o={};r.$m.forEach(e=>{let t=a.includes(e.toLowerCase());o[e]={name:e,clusters:0,jobs:0,enabled:t}}),e.forEach(e=>{if(e.cloud){let t=e.cloud;o[t]&&(o[t].clusters+=1,o[t].enabled=!0)}}),t.forEach(e=>{if(e.cloud){let t=e.cloud;o[t]&&(o[t].jobs+=1,o[t].enabled=!0)}});let n=r.$m.length,l=Object.values(o).filter(e=>e.enabled).length;return{clouds:Object.values(o).filter(e=>e.enabled).sort((e,t)=>e.name.localeCompare(t.name)),totalClouds:n,enabledClouds:l}}catch(e){return console.error("Error fetching cloud infrastructure:",e),{clouds:[],totalClouds:r.$m.length,enabledClouds:0}}}async function n(){let{getClusters:e}=await Promise.resolve().then(a.bind(a,3266)),{getManagedJobs:t}=await Promise.resolve().then(a.bind(a,8969)),r=(await Promise.resolve().then(a.bind(a,6378))).default,[s,n]=await Promise.all([r.get(e),r.get(t,[{allUsers:!0}])]),c=s||[],u=(null==n?void 0:n.jobs)||[],[i,d]=await Promise.all([l(c,u),o(c,u)]);return{gpuData:i,cloudData:d}}async function l(e,t){return await g({clusters:e||[],jobs:t||[]})}async function c(){try{let e=await s.x.post("/realtime_kubernetes_gpu_availability",{context:null,name_filter:null,quantity_filter:null});if(!e.ok)return console.error("Error fetching Kubernetes context GPUs (in getKubernetesContextGPUs): ".concat(e.status," ").concat(e.statusText)),[];let t=e.headers.get("X-Skypilot-Request-ID")||e.headers.get("x-request-id");if(!t)return console.error("No request ID returned for Kubernetes GPU availability (in getKubernetesContextGPUs)"),[];let a=await s.x.get("/api/get?request_id=".concat(t)),r=await a.text();if(500===a.status){try{let e=JSON.parse(r);if(e.detail&&e.detail.error)try{let t=JSON.parse(e.detail.error);console.error("[infra.jsx] getKubernetesContextGPUs: Server error detail:",t.message)}catch(t){console.error("[infra.jsx] getKubernetesContextGPUs: Error parsing server error JSON:",t,"Original error text:",e.detail.error)}}catch(e){console.error("[infra.jsx] getKubernetesContextGPUs: Error parsing 500 error response JSON:",e,"Raw text was:",r)}return[]}let o=JSON.parse(r);return o.return_value?JSON.parse(o.return_value):[]}catch(e){return console.error("[infra.jsx] Outer error in getKubernetesContextGPUs:",e),[]}}async function u(){try{let e=await s.x.get("/all_contexts");if(!e.ok)return console.error("Error fetching all contexts: ".concat(e.status," ").concat(e.statusText)),[];let t=e.headers.get("X-Skypilot-Request-ID")||e.headers.get("x-request-id");if(!t)return console.error("No request ID returned for /all_contexts"),[];let a=await s.x.get("/api/get?request_id=".concat(t)),r=await a.json();return r.return_value?JSON.parse(r.return_value):[]}catch(e){return console.error("[infra.jsx] Error in getAllContexts:",e),[]}}async function i(e){try{let t=await s.x.post("/kubernetes_node_info",{context:e}),a=t.headers.get("X-Skypilot-Request-ID")||t.headers.get("x-request-id"),r=await s.x.get("/api/get?request_id=".concat(a));if(500===r.status){try{let e=await r.json();if(e.detail&&e.detail.error)try{let t=JSON.parse(e.detail.error);console.error("Error fetching Kubernetes per node GPUs:",t.message)}catch(e){console.error("Error parsing JSON:",e)}}catch(e){console.error("Error parsing JSON:",e)}return{}}let o=await r.json();return(o.return_value?JSON.parse(o.return_value):{}).node_info_dict||{}}catch(t){return console.error("[infra.jsx] Error in getKubernetesPerNodeGPUs for context",e,":",t),{}}}async function d(e){try{let t=e.clusters,a=e.jobs,r={};return t.forEach(e=>{let t=null;if("Kubernetes"===e.cloud)(t=e.region)&&(t="kubernetes/".concat(t));else if("SSH"===e.cloud&&(t=e.region)){let e=t.startsWith("ssh-")?t.substring(4):t;t="ssh/".concat(e)}t&&(r[t]||(r[t]={clusters:0,jobs:0}),r[t].clusters+=1)}),a.forEach(e=>{let t=null;if("Kubernetes"===e.cloud)(t=e.region)&&(t="kubernetes/".concat(t));else if("SSH"===e.cloud&&(t=e.region)){let e=t.startsWith("ssh-")?t.substring(4):t;t="ssh/".concat(e)}t&&(r[t]||(r[t]={clusters:0,jobs:0}),r[t].jobs+=1)}),r}catch(e){return console.error("=== Error in getContextClustersAndJobs ===",e),{}}}async function g(e){try{let o=await u();if(!o||0===o.length)return console.log("No contexts found from /all_contexts endpoint."),{allContextNames:[],allGPUs:[],perContextGPUs:[],perNodeGPUs:[],contextStats:{}};let n=await d(e),l=await c(),g=new Map;l&&l.forEach(e=>{g.set(e[0],e[1])});let f={},p={},_={};for(let e of o){p[e]||(p[e]=[]);let o=g.get(e);if(o&&o.length>0)for(let t of o){let a=t[0],r=t[1].join(", "),s=t[2],o=t[3];a in f?(f[a].gpu_total+=s,f[a].gpu_free+=o):f[a]={gpu_total:s,gpu_free:o,gpu_name:a},p[e].push({gpu_name:a,gpu_requestable_qty_per_node:r,gpu_total:s,gpu_free:o,context:e})}let n=await i(e);if(n&&Object.keys(n).length>0)for(let o in n){var t,a,r,s;let l=n[o],c=l.accelerator_type||"-",u=null!==(r=null===(t=l.total)||void 0===t?void 0:t.accelerator_count)&&void 0!==r?r:0,i=null!==(s=null===(a=l.free)||void 0===a?void 0:a.accelerators_available)&&void 0!==s?s:0;_["".concat(e,"/").concat(o)]={node_name:l.name,gpu_name:c,gpu_total:u,gpu_free:i,ip_address:l.ip_address||null,context:e},"-"===c||p[e].some(e=>e.gpu_name===c)||(c in f||(f[c]={gpu_total:0,gpu_free:0,gpu_name:c}),p[e].find(e=>e.gpu_name===c)||p[e].push({gpu_name:c,gpu_requestable_qty_per_node:"-",gpu_total:0,gpu_free:0,context:e}))}0===p[e].length&&n&&Object.keys(n).length}return{allContextNames:o.sort(),allGPUs:Object.values(f).sort((e,t)=>e.gpu_name.localeCompare(t.gpu_name)),perContextGPUs:Object.values(p).flat().sort((e,t)=>e.context.localeCompare(t.context)||e.gpu_name.localeCompare(t.gpu_name)),perNodeGPUs:Object.values(_).sort((e,t)=>e.context.localeCompare(t.context)||e.node_name.localeCompare(t.node_name)||e.gpu_name.localeCompare(t.gpu_name)),contextStats:n}}catch(e){return console.error("[infra.jsx] Outer error in getKubernetesGPUs:",e),{allContextNames:[],allGPUs:[],perContextGPUs:[],perNodeGPUs:[],contextStats:{}}}}},3081:function(e,t,a){a.d(t,{R:function(){return s}}),a(3266),a(8969);var r=a(7145);async function s(){try{let e=await r.x.get("/users");if(!e.ok)throw Error("HTTP error! status: ".concat(e.status));return(await e.json()).map(e=>({userId:e.id,username:e.name,role:e.role,created_at:e.created_at}))||[]}catch(e){return console.error("Failed to fetch users:",e),[]}}},9238:function(e,t,a){a.d(t,{C:function(){return s},w:function(){return o}});var r=a(7145);async function s(){try{return(await r.x.fetch("/volumes",{},"GET")).map(e=>{var t,a,r;let s=e.cloud||"";return e.region&&(s+="/".concat(e.region)),e.zone&&(s+="/".concat(e.zone)),{name:e.name,launched_at:e.launched_at,user_hash:e.user_hash,user_name:e.user_name||"-",workspace:e.workspace||"-",last_attached_at:e.last_attached_at,status:e.status,type:e.type,cloud:e.cloud,region:e.region,zone:e.zone,infra:s,size:"".concat(e.size,"Gi"),config:e.config,storage_class:(null===(t=e.config)||void 0===t?void 0:t.storage_class_name)||"-",access_mode:(null===(a=e.config)||void 0===a?void 0:a.access_mode)||"-",namespace:(null===(r=e.config)||void 0===r?void 0:r.namespace)||"-",name_on_cloud:e.name_on_cloud,usedby_pods:e.usedby_pods,usedby_clusters:e.usedby_clusters}})||[]}catch(e){return console.error("Failed to fetch volumes:",e),[]}}async function o(e){let t="";try{let a=(await r.x.post("/volumes/delete",{names:[e]})).headers.get("X-Request-ID"),s=await r.x.get("/api/get?request_id=".concat(a));if(500===s.status){try{let e=await s.json();if(e.detail&&e.detail.error)try{t=JSON.parse(e.detail.error).message}catch(e){console.error("Error parsing JSON:",e)}}catch(e){console.error("Error parsing JSON:",e)}return{success:!1,msg:t}}return{success:!0}}catch(e){return console.error("Failed to delete volume:",e),{success:!1,msg:e.message}}}},6856:function(e,t,a){var r=a(6378),s=a(3266),o=a(8969),n=a(7324),l=a(3081),c=a(2045),u=a(9238);let i={base:{getClusters:{fn:s.getClusters,args:[]},getClusterHistory:{fn:s.uR,args:[]},getManagedJobs:{fn:o.getManagedJobs,args:[{allUsers:!0}]},getWorkspaces:{fn:n.fX,args:[]},getUsers:{fn:l.R,args:[]},getInfraData:{fn:c.l4,args:[]},getVolumes:{fn:u.C,args:[]}},dynamic:{getEnabledClouds:{fn:n.yz,requiresWorkspaces:!0}},pages:{clusters:["getClusters","getClusterHistory","getWorkspaces","getUsers"],jobs:["getManagedJobs","getClusters","getWorkspaces","getUsers"],infra:["getInfraData","getClusters","getManagedJobs"],workspaces:["getWorkspaces","getClusters","getManagedJobs","getEnabledClouds"],users:["getUsers","getClusters","getManagedJobs"],volumes:["getVolumes"]}};class d{async preloadForPage(e,t){let{backgroundPreload:a=!0,force:r=!1}=t||{};if(!i.pages[e]){console.warn("Unknown page: ".concat(e));return}console.log("[CachePreloader] Preloading cache for page: ".concat(e));try{await this._loadPageData(e,r),a&&this._backgroundPreloadOtherPages(e)}catch(t){console.error("[CachePreloader] Error preloading for page ".concat(e,":"),t)}}async _loadPageData(e){let t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],a=i.pages[e],s=[];for(let e of a)if(i.base[e]){let{fn:a,args:o}=i.base[e];t&&r.default.invalidate(a,o),s.push(r.default.get(a,o))}else"getEnabledClouds"===e&&s.push(this._loadEnabledCloudsForAllWorkspaces(t));await Promise.allSettled(s),console.log("[CachePreloader] Loaded data for page: ".concat(e))}async _loadEnabledCloudsForAllWorkspaces(){let e=arguments.length>0&&void 0!==arguments[0]&&arguments[0];try{e&&r.default.invalidate(n.fX);let t=await r.default.get(n.fX),a=Object.keys(t||{}).map(t=>(e&&r.default.invalidate(n.yz,[t]),r.default.get(n.yz,[t])));await Promise.allSettled(a)}catch(e){console.error("[CachePreloader] Error loading enabled clouds:",e)}}_backgroundPreloadOtherPages(e){if(this.isPreloading)return;this.isPreloading=!0;let t=Object.keys(i.pages).filter(t=>t!==e);console.log("[CachePreloader] Background preloading pages: ".concat(t.join(", "))),Promise.allSettled(t.map(async e=>{try{await this._loadPageData(e,!1),console.log("[CachePreloader] Background loaded: ".concat(e))}catch(t){console.error("[CachePreloader] Background load failed for ".concat(e,":"),t)}})).then(()=>{this.isPreloading=!1,console.log("[CachePreloader] Background preloading complete")})}async preloadBaseFunctions(){let e=arguments.length>0&&void 0!==arguments[0]&&arguments[0];console.log("[CachePreloader] Preloading all base functions");let t=Object.entries(i.base).map(t=>{let[a,{fn:s,args:o}]=t;return e&&r.default.invalidate(s,o),r.default.get(s,o).catch(e=>{console.error("[CachePreloader] Failed to preload ".concat(a,":"),e)})});await Promise.allSettled(t),console.log("[CachePreloader] Base functions preloaded")}getCacheStats(){return{...r.default.getStats(),isPreloading:this.isPreloading}}clearCache(){r.default.clear(),this.isPreloading=!1,this.preloadPromises.clear(),console.log("[CachePreloader] Cache cleared")}constructor(){this.isPreloading=!1,this.preloadPromises=new Map}}let g=new d;t.ZP=g}}]);